diff --git a/.gitignore b/.gitignore index 102ec03d..eab9d8dd 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,30 @@ cluster/ansible/manifests/inventory.ini apps/gitea-webhook-ambassador/gitea-webhook-ambassador apps/gitea-webhook-ambassador/config.yaml apps/gitea-webhook-ambassador/data/gitea-webhook-ambassador.db -.cursorrules \ No newline at end of file +.cursorrules + +# Python virtual environments +venv/ +env/ +.venv/ +.env/ +test_env/ +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +pip-log.txt +pip-delete-this-directory.txt +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.log +.git +.mypy_cache +.pytest_cache +.hypothesis \ No newline at end of file diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 deleted file mode 100644 index b49d77ba..00000000 --- a/venv/bin/Activate.ps1 +++ /dev/null @@ -1,247 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. 
- -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove VIRTUAL_ENV_PROMPT altogether. - if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { - Remove-Item -Path env:VIRTUAL_ENV_PROMPT - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. 
- if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. 
-$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } - $env:VIRTUAL_ENV_PROMPT = $Prompt -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate deleted file mode 100644 index fdc9b08a..00000000 --- a/venv/bin/activate +++ /dev/null @@ -1,70 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# You cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # Call hash to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - hash -r 2> /dev/null - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - unset VIRTUAL_ENV_PROMPT - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -# on Windows, a path can contain colons and backslashes and has to be converted: -if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then - # transform D:\path\to\venv to /d/path/to/venv on MSYS - # and to /cygdrive/d/path/to/venv on Cygwin - export VIRTUAL_ENV=$(cygpath /home/nicolas/freeleaps-ops/venv) -else - # use the path as-is - export VIRTUAL_ENV=/home/nicolas/freeleaps-ops/venv -fi - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/"bin":$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1='(venv) '"${PS1:-}" - export PS1 - VIRTUAL_ENV_PROMPT='(venv) ' - export VIRTUAL_ENV_PROMPT -fi - -# Call hash to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -hash -r 2> /dev/null diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh deleted file mode 100644 index d6a32fb7..00000000 --- a/venv/bin/activate.csh +++ /dev/null @@ -1,27 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. - -# Created by Davide Di Blasi . 
-# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV /home/nicolas/freeleaps-ops/venv - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/"bin":$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - set prompt = '(venv) '"$prompt" - setenv VIRTUAL_ENV_PROMPT '(venv) ' -endif - -alias pydoc python -m pydoc - -rehash diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish deleted file mode 100644 index bb08d0ec..00000000 --- a/venv/bin/activate.fish +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source /bin/activate.fish" *from fish* -# (https://fishshell.com/). You cannot run it directly. - -function deactivate -d "Exit virtual environment and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - set -e _OLD_FISH_PROMPT_OVERRIDE - # prevents error when using nested fish instances (Issue #93858) - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - end - - set -e VIRTUAL_ENV - set -e VIRTUAL_ENV_PROMPT - if test "$argv[1]" != "nondestructive" - # Self-destruct! - functions -e deactivate - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV /home/nicolas/freeleaps-ops/venv - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/"bin $PATH - -# Unset PYTHONHOME if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # Save the current fish_prompt function as the function _old_fish_prompt. - functions -c fish_prompt _old_fish_prompt - - # With the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command. - set -l old_status $status - - # Output the venv prompt; color taken from the blue of the Python logo. - printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal) - - # Restore the return status of the previous command. - echo "exit $old_status" | . - # Output the original/"old" prompt. 
- _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - set -gx VIRTUAL_ENV_PROMPT '(venv) ' -end diff --git a/venv/bin/beanie b/venv/bin/beanie deleted file mode 100755 index 4d73c5b0..00000000 --- a/venv/bin/beanie +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from beanie.executors.migrate import migrations -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(migrations()) diff --git a/venv/bin/dotenv b/venv/bin/dotenv deleted file mode 100755 index ca5c550f..00000000 --- a/venv/bin/dotenv +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from dotenv.__main__ import cli -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(cli()) diff --git a/venv/bin/email_validator b/venv/bin/email_validator deleted file mode 100755 index 46c59278..00000000 --- a/venv/bin/email_validator +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from email_validator.__main__ import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/fastapi b/venv/bin/fastapi deleted file mode 100755 index 9959b1c1..00000000 --- a/venv/bin/fastapi +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from fastapi.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/httpx b/venv/bin/httpx deleted file mode 100755 index 8d45c9cf..00000000 --- a/venv/bin/httpx +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from httpx import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/normalizer b/venv/bin/normalizer deleted file mode 100755 index 6d983cc2..00000000 --- a/venv/bin/normalizer +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from charset_normalizer import cli -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(cli.cli_detect()) diff --git a/venv/bin/pip b/venv/bin/pip deleted file mode 100755 index cfc780d6..00000000 --- a/venv/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 deleted file mode 100755 index cfc780d6..00000000 --- a/venv/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pip3.12 b/venv/bin/pip3.12 deleted file mode 100755 index cfc780d6..00000000 --- a/venv/bin/pip3.12 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- 
-import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/bin/pyrsa-decrypt b/venv/bin/pyrsa-decrypt deleted file mode 100755 index 1bd9f659..00000000 --- a/venv/bin/pyrsa-decrypt +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.cli import decrypt -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(decrypt()) diff --git a/venv/bin/pyrsa-encrypt b/venv/bin/pyrsa-encrypt deleted file mode 100755 index 366a69dc..00000000 --- a/venv/bin/pyrsa-encrypt +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.cli import encrypt -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(encrypt()) diff --git a/venv/bin/pyrsa-keygen b/venv/bin/pyrsa-keygen deleted file mode 100755 index 07da110e..00000000 --- a/venv/bin/pyrsa-keygen +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.cli import keygen -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(keygen()) diff --git a/venv/bin/pyrsa-priv2pub b/venv/bin/pyrsa-priv2pub deleted file mode 100755 index 7ad7788c..00000000 --- a/venv/bin/pyrsa-priv2pub +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.util import private_to_public -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(private_to_public()) diff --git a/venv/bin/pyrsa-sign b/venv/bin/pyrsa-sign deleted file mode 100755 index 9de4d7c0..00000000 --- a/venv/bin/pyrsa-sign +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.cli import sign -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(sign()) diff --git a/venv/bin/pyrsa-verify b/venv/bin/pyrsa-verify deleted file mode 100755 index 2a115a48..00000000 --- a/venv/bin/pyrsa-verify +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from rsa.cli import verify -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(verify()) diff --git a/venv/bin/python b/venv/bin/python deleted file mode 120000 index b8a0adbb..00000000 --- a/venv/bin/python +++ /dev/null @@ -1 +0,0 @@ -python3 \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 deleted file mode 120000 index ae65fdaa..00000000 --- a/venv/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -/usr/bin/python3 \ No newline at end of file diff --git a/venv/bin/python3.12 b/venv/bin/python3.12 deleted file mode 120000 index b8a0adbb..00000000 --- a/venv/bin/python3.12 +++ /dev/null @@ -1 +0,0 @@ -python3 \ No newline at end of file diff --git a/venv/bin/uvicorn b/venv/bin/uvicorn deleted file mode 100755 index f282ae54..00000000 --- a/venv/bin/uvicorn +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/nicolas/freeleaps-ops/venv/bin/python3 -# -*- coding: utf-8 -*- -import re -import sys -from uvicorn.main import main -if __name__ == '__main__': - sys.argv[0] = 
re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/venv/lib/python3.12/site-packages/LICENSE b/venv/lib/python3.12/site-packages/LICENSE deleted file mode 100644 index 176437a5..00000000 --- a/venv/lib/python3.12/site-packages/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. 
Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2022 Roman Right (Korolev) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc b/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc deleted file mode 100644 index 32d24cde..00000000 Binary files a/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc deleted file mode 100644 index 2b1a4537..00000000 Binary files a/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA deleted file mode 100644 index 3ac05cfd..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA +++ /dev/null @@ -1,295 +0,0 @@ -Metadata-Version: 2.3 -Name: annotated-types -Version: 0.7.0 -Summary: Reusable constraint types to use with typing.Annotated -Project-URL: Homepage, https://github.com/annotated-types/annotated-types -Project-URL: Source, https://github.com/annotated-types/annotated-types -Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases -Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds -License-File: LICENSE -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Environment :: MacOS X -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Unix -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.8 -Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' -Description-Content-Type: text/markdown - -# annotated-types - -[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) -[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) 
-[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) -[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) - -[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of -adding context-specific metadata to existing types, and specifies that -`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special -logic for `x`. - -This package provides metadata objects which can be used to represent common -constraints such as upper and lower bounds on scalar values and collection sizes, -a `Predicate` marker for runtime checks, and -descriptions of how we intend these metadata to be interpreted. In some cases, -we also note alternative representations which do not require this package. - -## Install - -```bash -pip install annotated-types -``` - -## Examples - -```python -from typing import Annotated -from annotated_types import Gt, Len, Predicate - -class MyClass: - age: Annotated[int, Gt(18)] # Valid: 19, 20, ... - # Invalid: 17, 18, "19", 19.0, ... - factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ... - # Invalid: 4, 8, -2, 5.0, "prime", ... - - my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50] - # Invalid: (1, 2), ["abc"], [0] * 20 -``` - -## Documentation - -_While `annotated-types` avoids runtime checks for performance, users should not -construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`. -Downstream implementors may choose to raise an error, emit a warning, silently ignore -a metadata item, etc., if the metadata objects described below are used with an -incompatible type - or for any other reason!_ - -### Gt, Ge, Lt, Le - -Express inclusive and/or exclusive bounds on orderable values - which may be numbers, -dates, times, strings, sets, etc. Note that the boundary value need not be of the -same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]` -is fine, for example, and implies that the value is an integer x such that `x > 1.5`. - -We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)` -as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on -the `annotated-types` package. - -To be explicit, these types have the following meanings: - -* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum -* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum -* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum -* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum - -### Interval - -`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single -metadata object. `None` attributes should be ignored, and non-`None` attributes -treated as per the single bounds above. - -### MultipleOf - -`MultipleOf(multiple_of=x)` might be interpreted in two ways: - -1. Python semantics, implying `value % multiple_of == 0`, or -2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1), - where `int(value / multiple_of) == value / multiple_of`. 
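The two interpretations listed above are not equivalent once floating-point values are involved. As a minimal sketch (ours, not part of the deleted README; the helper names are illustrative) of how the two readings diverge:

```python
def is_multiple_python(value: float, multiple_of: float) -> bool:
    # Interpretation 1: Python modulo semantics.
    return value % multiple_of == 0


def is_multiple_jsonschema(value: float, multiple_of: float) -> bool:
    # Interpretation 2: JSON Schema semantics -- the quotient must be integral.
    quotient = value / multiple_of
    return int(quotient) == quotient


# Exact binary fractions behave identically under both readings...
assert is_multiple_python(4.5, 1.5) and is_multiple_jsonschema(4.5, 1.5)

# ...but 0.1 is not exactly representable, so the two readings disagree:
print(1.0 % 0.1)                         # 0.09999999999999995 -> not a "multiple"
print(is_multiple_python(1.0, 0.1))      # False
print(is_multiple_jsonschema(1.0, 0.1))  # True (1.0 / 0.1 rounds to exactly 10.0)
```

This divergence is the silent-corruption risk the README flags next.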
-
-We encourage users to be aware of these two common interpretations and their
-distinct behaviours, especially since very large or non-integer numbers make
-it easy to cause silent data corruption due to floating-point imprecision.
-
-We encourage libraries to carefully document which interpretation they implement.
-
-### MinLen, MaxLen, Len
-
-`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
-
-As well as `Len()` which can optionally include upper and lower bounds, we also
-provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
-and `Len(max_length=y)` respectively.
-
-`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
-
-Examples of usage:
-
-* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
-* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
-* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
-* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
-* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
-
-#### Changed in v0.4.0
-
-* `min_inclusive` has been renamed to `min_length`, no change in meaning
-* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
-* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
-  meaning of the upper bound in slices vs. `Len`
-
-See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
-
-### Timezone
-
-`Timezone` can be used with a `datetime` or a `time` to express which timezones
-are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
-`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
-expresses that any timezone-aware datetime is allowed. You may also pass a specific
-timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
-object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
-allow a specific timezone, though we note that this is often a symptom of fragile design.
-
-#### Changed in v0.x.x
-
-* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
-  `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
-
-### Unit
-
-`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
-a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
-would be a float representing a velocity in meters per second.
-
-Please note that `annotated_types` itself makes no attempt to parse or validate
-the unit string in any way. That is left entirely to downstream libraries,
-such as [`pint`](https://pint.readthedocs.io) or
-[`astropy.units`](https://docs.astropy.org/en/stable/units/).
-
-An example of how a library might use this metadata:
-
-```python
-from annotated_types import Unit
-from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
-
-# given a type annotated with a unit:
-Meters = Annotated[float, Unit("m")]
-
-
-# you can cast the annotation to a specific unit type with any
-# callable that accepts a string and returns the desired type
-T = TypeVar("T")
-def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
-    if get_origin(tp) is Annotated:
-        for arg in get_args(tp):
-            if isinstance(arg, Unit):
-                return unit_cls(arg.unit)
-    return None
-
-
-# using `pint`
-import pint
-pint_unit = cast_unit(Meters, pint.Unit)
-
-
-# using `astropy.units`
-import astropy.units as u
-astropy_unit = cast_unit(Meters, u.Unit)
-```
-
-### Predicate
-
-`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
-Users should prefer the statically inspectable metadata above, but if you need
-the full power and flexibility of arbitrary runtime predicates... here it is.
-
-For some common constraints, we provide generic types:
-
-* `IsLower = Annotated[T, Predicate(str.islower)]`
-* `IsUpper = Annotated[T, Predicate(str.isupper)]`
-* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
-* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
-* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
-* `IsNan = Annotated[T, Predicate(math.isnan)]`
-* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
-* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
-* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
-
-so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
-(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
-
-Some libraries might have special logic to handle known or understandable predicates,
-for example by checking for `str.isdigit` and using its presence to both call custom
-logic to enforce digit-only strings, and customise some generated external schema.
-Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
-favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
-
-To enable basic negation of commonly used predicates like `math.isnan` without introducing indirection that makes it impossible for implementers to introspect the predicate, we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this manner.
-
-We do not specify what behaviour should be expected for predicates that raise
-an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
-skip invalid constraints, or statically raise an error; or it might try calling it
-and then propagate or discard the resulting
-`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
-exception. We encourage libraries to document the behaviour they choose.
-
-### Doc
-
-`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
-
-It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
-
-It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
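A small usage sketch (ours; it assumes the fallback `doc`/`DocInfo` implementation that appears near the end of this diff) of attaching documentation and reading it back at runtime:

```python
from typing import Annotated, Any, get_args, get_origin

from annotated_types import DocInfo, doc

Port = Annotated[int, doc("TCP port the server listens on.")]


def extract_docs(tp: Any) -> list[str]:
    # Pull the documentation strings out of an Annotated alias's metadata.
    if get_origin(tp) is not Annotated:
        return []
    return [m.documentation for m in get_args(tp)[1:] if isinstance(m, DocInfo)]


print(extract_docs(Port))  # ['TCP port the server listens on.']
```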
-
-This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
-
-### Integrating downstream types with `GroupedMetadata`
-
-Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
-This can help reduce verbosity and cognitive overhead for users.
-For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
-
-```python
-from dataclasses import dataclass
-from typing import Iterator
-from annotated_types import GroupedMetadata, Ge
-
-@dataclass
-class Field(GroupedMetadata):
-    ge: int | None = None
-    description: str | None = None
-
-    def __iter__(self) -> Iterator[object]:
-        # Iterating over a GroupedMetadata object should yield annotated-types
-        # constraint metadata objects which describe it as fully as possible,
-        # and may include other unknown objects too.
-        if self.ge is not None:
-            yield Ge(self.ge)
-        if self.description is not None:
-            yield Description(self.description)
-```
-
-Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
-
-Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
-
-Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
-
-### Consuming metadata
-
-We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
-
-It is up to the implementer to determine how this metadata is used.
-You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
-
-## Design & History
-
-This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
-and Hypothesis, with the goal of making it as easy as possible for end-users to
-provide more informative annotations for use by runtime libraries.
-
-It is deliberately minimal, and following PEP-593 allows considerable downstream
-discretion in what (if anything!) they choose to support. Nonetheless, we expect
-that staying simple and covering _only_ the most common use-cases will give users
-and maintainers the best experience we can. If you'd like more constraints for your
-types - follow our lead, by defining them and documenting them downstream!
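Before the diff moves on to the package's remaining dist-info files, a minimal consumer sketch (ours, following the unpacking rules the README describes above): it traverses an `Annotated` alias, unpacks any `GroupedMetadata`, keeps `BaseMetadata` constraints, and ignores everything else.

```python
from typing import Annotated, Any, get_args, get_origin

from annotated_types import BaseMetadata, GroupedMetadata, Interval, Len


def collect_constraints(tp: Any) -> list[BaseMetadata]:
    # Flatten the annotated-types constraints attached to an Annotated alias.
    if get_origin(tp) is not Annotated:
        return []
    found: list[BaseMetadata] = []
    for meta in get_args(tp)[1:]:
        if isinstance(meta, GroupedMetadata):
            # Unpack grouped metadata (Interval, Len, Field-like wrappers, ...)
            found.extend(m for m in meta if isinstance(m, BaseMetadata))
        elif isinstance(meta, BaseMetadata):
            found.append(meta)
        # Anything else is unrecognized metadata and is deliberately ignored.
    return found


print(collect_constraints(Annotated[int, Interval(gt=0, le=100)]))
# [Gt(gt=0), Le(le=100)]
print(collect_constraints(Annotated[list, Len(2, 5)]))
# [MinLen(min_length=2), MaxLen(max_length=5)]
```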
diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD deleted file mode 100644 index a66e2783..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 -annotated_types-0.7.0.dist-info/RECORD,, -annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 -annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 -annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 -annotated_types/__pycache__/__init__.cpython-312.pyc,, -annotated_types/__pycache__/test_cases.cpython-312.pyc,, -annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL deleted file mode 100644 index 516596c7..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.24.2 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE deleted file mode 100644 index d99323a9..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 the contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/venv/lib/python3.12/site-packages/annotated_types/__init__.py b/venv/lib/python3.12/site-packages/annotated_types/__init__.py deleted file mode 100644 index 74e0deea..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types/__init__.py +++ /dev/null @@ -1,432 +0,0 @@ -import math -import sys -import types -from dataclasses import dataclass -from datetime import tzinfo -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union - -if sys.version_info < (3, 8): - from typing_extensions import Protocol, runtime_checkable -else: - from typing import Protocol, runtime_checkable - -if sys.version_info < (3, 9): - from typing_extensions import Annotated, Literal -else: - from typing import Annotated, Literal - -if sys.version_info < (3, 10): - EllipsisType = type(Ellipsis) - KW_ONLY = {} - SLOTS = {} -else: - from types import EllipsisType - - KW_ONLY = {"kw_only": True} - SLOTS = {"slots": True} - - -__all__ = ( - 'BaseMetadata', - 'GroupedMetadata', - 'Gt', - 'Ge', - 'Lt', - 'Le', - 'Interval', - 'MultipleOf', - 'MinLen', - 'MaxLen', - 'Len', - 'Timezone', - 'Predicate', - 'LowerCase', - 'UpperCase', - 'IsDigits', - 'IsFinite', - 'IsNotFinite', - 'IsNan', - 'IsNotNan', - 'IsInfinite', - 'IsNotInfinite', - 'doc', - 'DocInfo', - '__version__', -) - -__version__ = '0.7.0' - - -T = TypeVar('T') - - -# arguments that start with __ are considered -# positional only -# see https://peps.python.org/pep-0484/#positional-only-arguments - - -class SupportsGt(Protocol): - def __gt__(self: T, __other: T) -> bool: - ... - - -class SupportsGe(Protocol): - def __ge__(self: T, __other: T) -> bool: - ... - - -class SupportsLt(Protocol): - def __lt__(self: T, __other: T) -> bool: - ... - - -class SupportsLe(Protocol): - def __le__(self: T, __other: T) -> bool: - ... - - -class SupportsMod(Protocol): - def __mod__(self: T, __other: T) -> T: - ... - - -class SupportsDiv(Protocol): - def __div__(self: T, __other: T) -> T: - ... - - -class BaseMetadata: - """Base class for all metadata. - - This exists mainly so that implementers - can do `isinstance(..., BaseMetadata)` while traversing field annotations. - """ - - __slots__ = () - - -@dataclass(frozen=True, **SLOTS) -class Gt(BaseMetadata): - """Gt(gt=x) implies that the value must be greater than x. - - It can be used with any type that supports the ``>`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - gt: SupportsGt - - -@dataclass(frozen=True, **SLOTS) -class Ge(BaseMetadata): - """Ge(ge=x) implies that the value must be greater than or equal to x. - - It can be used with any type that supports the ``>=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - ge: SupportsGe - - -@dataclass(frozen=True, **SLOTS) -class Lt(BaseMetadata): - """Lt(lt=x) implies that the value must be less than x. - - It can be used with any type that supports the ``<`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - lt: SupportsLt - - -@dataclass(frozen=True, **SLOTS) -class Le(BaseMetadata): - """Le(le=x) implies that the value must be less than or equal to x. - - It can be used with any type that supports the ``<=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - le: SupportsLe - - -@runtime_checkable -class GroupedMetadata(Protocol): - """A grouping of multiple objects, like typing.Unpack. - - `GroupedMetadata` on its own is not metadata and has no meaning. 
-    All of the constraints and metadata should be fully expressible
-    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
-
-    Concrete implementations should override `GroupedMetadata.__iter__()`
-    to add their own metadata.
-    For example:
-
-    >>> @dataclass
-    >>> class Field(GroupedMetadata):
-    >>>     gt: float | None = None
-    >>>     description: str | None = None
-    ...
-    >>> def __iter__(self) -> Iterable[object]:
-    >>>     if self.gt is not None:
-    >>>         yield Gt(self.gt)
-    >>>     if self.description is not None:
-    >>>         yield Description(self.description)
-
-    Also see the implementation of `Interval` below for an example.
-
-    Parsers should recognize this and unpack it so that it can be used
-    both with and without unpacking:
-
-    - `Annotated[int, Field(...)]` (parser must unpack Field)
-    - `Annotated[int, *Field(...)]` (PEP-646)
-    """  # noqa: trailing-whitespace
-
-    @property
-    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
-        return True
-
-    def __iter__(self) -> Iterator[object]:
-        ...
-
-    if not TYPE_CHECKING:
-        __slots__ = ()  # allow subclasses to use slots
-
-        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
-            # Basic ABC like functionality without the complexity of an ABC
-            super().__init_subclass__(*args, **kwargs)
-            if cls.__iter__ is GroupedMetadata.__iter__:
-                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
-
-        def __iter__(self) -> Iterator[object]:  # noqa: F811
-            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
-
-
-@dataclass(frozen=True, **KW_ONLY, **SLOTS)
-class Interval(GroupedMetadata):
-    """Interval can express inclusive or exclusive bounds with a single object.
-
-    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
-    are interpreted the same way as the single-bound constraints.
-    """
-
-    gt: Union[SupportsGt, None] = None
-    ge: Union[SupportsGe, None] = None
-    lt: Union[SupportsLt, None] = None
-    le: Union[SupportsLe, None] = None
-
-    def __iter__(self) -> Iterator[BaseMetadata]:
-        """Unpack an Interval into zero or more single-bounds."""
-        if self.gt is not None:
-            yield Gt(self.gt)
-        if self.ge is not None:
-            yield Ge(self.ge)
-        if self.lt is not None:
-            yield Lt(self.lt)
-        if self.le is not None:
-            yield Le(self.le)
-
-
-@dataclass(frozen=True, **SLOTS)
-class MultipleOf(BaseMetadata):
-    """MultipleOf(multiple_of=x) might be interpreted in two ways:
-
-    1. Python semantics, implying ``value % multiple_of == 0``, or
-    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
-
-    We encourage users to be aware of these two common interpretations,
-    and libraries to carefully document which they implement.
-    """
-
-    multiple_of: Union[SupportsDiv, SupportsMod]
-
-
-@dataclass(frozen=True, **SLOTS)
-class MinLen(BaseMetadata):
-    """
-    MinLen() implies minimum inclusive length,
-    e.g. ``len(value) >= min_length``.
-    """
-
-    min_length: Annotated[int, Ge(0)]
-
-
-@dataclass(frozen=True, **SLOTS)
-class MaxLen(BaseMetadata):
-    """
-    MaxLen() implies maximum inclusive length,
-    e.g. ``len(value) <= max_length``.
-    """
-
-    max_length: Annotated[int, Ge(0)]
-
-
-@dataclass(frozen=True, **SLOTS)
-class Len(GroupedMetadata):
-    """
-    Len() implies that ``min_length <= len(value) <= max_length``.
-
-    Upper bound may be omitted or ``None`` to indicate no upper length bound.
- """ - - min_length: Annotated[int, Ge(0)] = 0 - max_length: Optional[Annotated[int, Ge(0)]] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack a Len into zone or more single-bounds.""" - if self.min_length > 0: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - - -@dataclass(frozen=True, **SLOTS) -class Timezone(BaseMetadata): - """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive). - - ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. - ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be - tz-aware but any timezone is allowed. - - You may also pass a specific timezone string or tzinfo object such as - ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that - you only allow a specific timezone, though we note that this is often - a symptom of poor design. - """ - - tz: Union[str, tzinfo, EllipsisType, None] - - -@dataclass(frozen=True, **SLOTS) -class Unit(BaseMetadata): - """Indicates that the value is a physical quantity with the specified unit. - - It is intended for usage with numeric types, where the value represents the - magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]`` - or ``speed: Annotated[float, Unit('m/s')]``. - - Interpretation of the unit string is left to the discretion of the consumer. - It is suggested to follow conventions established by python libraries that work - with physical quantities, such as - - - ``pint`` : - - ``astropy.units``: - - For indicating a quantity with a certain dimensionality but without a specific unit - it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`. - Note, however, ``annotated_types`` itself makes no use of the unit string. - """ - - unit: str - - -@dataclass(frozen=True, **SLOTS) -class Predicate(BaseMetadata): - """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. - - Users should prefer statically inspectable metadata, but if you need the full - power and flexibility of arbitrary runtime predicates... here it is. - - We provide a few predefined predicates for common string constraints: - ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and - ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which - can be given special handling, and avoid indirection like ``lambda s: s.lower()``. - - Some libraries might have special logic to handle certain predicates, e.g. by - checking for `str.isdigit` and using its presence to both call custom logic to - enforce digit-only strings, and customise some generated external schema. - - We do not specify what behaviour should be expected for predicates that raise - an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently - skip invalid constraints, or statically raise an error; or it might try calling it - and then propagate or discard the resulting exception. 
- """ - - func: Callable[[Any], bool] - - def __repr__(self) -> str: - if getattr(self.func, "__name__", "") == "": - return f"{self.__class__.__name__}({self.func!r})" - if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and ( - namespace := getattr(self.func.__self__, "__name__", None) - ): - return f"{self.__class__.__name__}({namespace}.{self.func.__name__})" - if isinstance(self.func, type(str.isascii)): # method descriptor - return f"{self.__class__.__name__}({self.func.__qualname__})" - return f"{self.__class__.__name__}({self.func.__name__})" - - -@dataclass -class Not: - func: Callable[[Any], bool] - - def __call__(self, __v: Any) -> bool: - return not self.func(__v) - - -_StrType = TypeVar("_StrType", bound=str) - -LowerCase = Annotated[_StrType, Predicate(str.islower)] -""" -Return True if the string is a lowercase string, False otherwise. - -A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string. -""" # noqa: E501 -UpperCase = Annotated[_StrType, Predicate(str.isupper)] -""" -Return True if the string is an uppercase string, False otherwise. - -A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string. -""" # noqa: E501 -IsDigit = Annotated[_StrType, Predicate(str.isdigit)] -IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63 -""" -Return True if the string is a digit string, False otherwise. - -A string is a digit string if all characters in the string are digits and there is at least one character in the string. -""" # noqa: E501 -IsAscii = Annotated[_StrType, Predicate(str.isascii)] -""" -Return True if all characters in the string are ASCII, False otherwise. - -ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too. -""" - -_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex]) -IsFinite = Annotated[_NumericType, Predicate(math.isfinite)] -"""Return True if x is neither an infinity nor a NaN, and False otherwise.""" -IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))] -"""Return True if x is one of infinity or NaN, and False otherwise""" -IsNan = Annotated[_NumericType, Predicate(math.isnan)] -"""Return True if x is a NaN (not a number), and False otherwise.""" -IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))] -"""Return True if x is anything but NaN (not a number), and False otherwise.""" -IsInfinite = Annotated[_NumericType, Predicate(math.isinf)] -"""Return True if x is a positive or negative infinity, and False otherwise.""" -IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))] -"""Return True if x is neither a positive or negative infinity, and False otherwise.""" - -try: - from typing_extensions import DocInfo, doc # type: ignore [attr-defined] -except ImportError: - - @dataclass(frozen=True, **SLOTS) - class DocInfo: # type: ignore [no-redef] - """ " - The return value of doc(), mainly to be used by tools that want to extract the - Annotated documentation at runtime. - """ - - documentation: str - """The documentation string passed to doc().""" - - def doc( - documentation: str, - ) -> DocInfo: - """ - Add documentation to a type annotation inside of Annotated. - - For example: - - >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ... 
- """ - return DocInfo(documentation) diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index bda8a169..00000000 Binary files a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc deleted file mode 100644 index 18423690..00000000 Binary files a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/annotated_types/py.typed b/venv/lib/python3.12/site-packages/annotated_types/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/annotated_types/test_cases.py b/venv/lib/python3.12/site-packages/annotated_types/test_cases.py deleted file mode 100644 index d9164d68..00000000 --- a/venv/lib/python3.12/site-packages/annotated_types/test_cases.py +++ /dev/null @@ -1,151 +0,0 @@ -import math -import sys -from datetime import date, datetime, timedelta, timezone -from decimal import Decimal -from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple - -if sys.version_info < (3, 9): - from typing_extensions import Annotated -else: - from typing import Annotated - -import annotated_types as at - - -class Case(NamedTuple): - """ - A test case for `annotated_types`. - """ - - annotation: Any - valid_cases: Iterable[Any] - invalid_cases: Iterable[Any] - - -def cases() -> Iterable[Case]: - # Gt, Ge, Lt, Le - yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Gt(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(date(2000, 1, 1))], - [date(2000, 1, 2), date(2000, 1, 3)], - [date(2000, 1, 1), date(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(Decimal('1.123'))], - [Decimal('1.1231'), Decimal('123')], - [Decimal('1.123'), Decimal('0')], - ) - - yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) - yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Ge(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(1998, 1, 1), datetime(1999, 12, 31)], - ) - - yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) - yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Lt(datetime(2000, 1, 1))], - [datetime(1999, 12, 31), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) - yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Le(datetime(2000, 1, 1))], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - # Interval - yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) - yield 
Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) - yield Case( - Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(2000, 1, 4)], - ) - - yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) - yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) - - # lengths - - yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - - yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - - yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) - yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) - - yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) - yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) - yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) - - # Timezone - - yield Case( - Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] - ) - yield Case( - Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] - ) - yield Case( - Annotated[datetime, at.Timezone(timezone.utc)], - [datetime(2000, 1, 1, tzinfo=timezone.utc)], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - yield Case( - Annotated[datetime, at.Timezone('Europe/London')], - [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - - # Quantity - - yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m')) - - # predicate types - - yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) - yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) - yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2']) - yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) - - yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) - - yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) - yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) - yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) - yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) - yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) - yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) - - # check stacked predicates - yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) - - # doc - yield Case(Annotated[int, at.doc("A number")], [1, 2], []) - - # custom GroupedMetadata 
- class MyCustomGroupedMetadata(at.GroupedMetadata): - def __iter__(self) -> Iterator[at.Predicate]: - yield at.Predicate(lambda x: float(x).is_integer()) - - yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/LICENSE b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/LICENSE deleted file mode 100644 index 104eebf5..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Alex Grönholm - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
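The test_cases module deleted above is shipped so downstream validators can drive conformance tests from it; a sketch of that use (assumes annotated_types installed from PyPI rather than vendored in this venv):

    from annotated_types.test_cases import cases

    # Each Case pairs an annotation with values expected to pass and fail.
    for case in cases():
        valid = list(case.valid_cases)
        invalid = list(case.invalid_cases)
        print(case.annotation, len(valid), "valid /", len(invalid), "invalid")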
diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/METADATA deleted file mode 100644 index 9d87e1db..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/METADATA +++ /dev/null @@ -1,105 +0,0 @@ -Metadata-Version: 2.2 -Name: anyio -Version: 4.9.0 -Summary: High level compatibility layer for multiple asynchronous event loop implementations -Author-email: Alex Grönholm -License: MIT -Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ -Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html -Project-URL: Source code, https://github.com/agronholm/anyio -Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Framework :: AnyIO -Classifier: Typing :: Typed -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11" -Requires-Dist: idna>=2.8 -Requires-Dist: sniffio>=1.1 -Requires-Dist: typing_extensions>=4.5; python_version < "3.13" -Provides-Extra: trio -Requires-Dist: trio>=0.26.1; extra == "trio" -Provides-Extra: test -Requires-Dist: anyio[trio]; extra == "test" -Requires-Dist: blockbuster>=1.5.23; extra == "test" -Requires-Dist: coverage[toml]>=7; extra == "test" -Requires-Dist: exceptiongroup>=1.2.0; extra == "test" -Requires-Dist: hypothesis>=4.0; extra == "test" -Requires-Dist: psutil>=5.9; extra == "test" -Requires-Dist: pytest>=7.0; extra == "test" -Requires-Dist: trustme; extra == "test" -Requires-Dist: truststore>=0.9.1; python_version >= "3.10" and extra == "test" -Requires-Dist: uvloop>=0.21; (platform_python_implementation == "CPython" and platform_system != "Windows" and python_version < "3.14") and extra == "test" -Provides-Extra: doc -Requires-Dist: packaging; extra == "doc" -Requires-Dist: Sphinx~=8.2; extra == "doc" -Requires-Dist: sphinx_rtd_theme; extra == "doc" -Requires-Dist: sphinx-autodoc-typehints>=1.2.0; extra == "doc" - -.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg - :target: https://github.com/agronholm/anyio/actions/workflows/test.yml - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master - :target: https://coveralls.io/github/agronholm/anyio?branch=master - :alt: Code Coverage -.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest - :target: https://anyio.readthedocs.io/en/latest/?badge=latest - :alt: Documentation -.. image:: https://badges.gitter.im/gitterHQ/gitter.svg - :target: https://gitter.im/python-trio/AnyIO - :alt: Gitter chat - -AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or -trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony -with the native SC of trio itself. - -Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or -trio_. 
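The portability claim in this README section is easy to demonstrate; a minimal sketch (assumes anyio installed from PyPI; the trio line additionally needs trio installed):

    import anyio

    async def main() -> None:
        print("available backends:", anyio.get_all_backends())
        await anyio.sleep(0.1)

    anyio.run(main)                    # runs on asyncio by default
    # anyio.run(main, backend="trio")  # identical code on trio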
AnyIO can also be adopted into a library or application incrementally – bit by bit, no full -refactoring necessary. It will blend in with the native libraries of your chosen backend. - -Documentation -------------- - -View full documentation at: https://anyio.readthedocs.io/ - -Features --------- - -AnyIO offers the following functionality: - -* Task groups (nurseries_ in trio terminology) -* High-level networking (TCP, UDP and UNIX sockets) - - * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python - 3.8) - * async/await style UDP sockets (unlike asyncio where you still have to use Transports and - Protocols) - -* A versatile API for byte streams and object streams -* Inter-task synchronization and communication (locks, conditions, events, semaphores, object - streams) -* Worker threads -* Subprocesses -* Asynchronous file I/O (using worker threads) -* Signal handling - -AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. -It even works with the popular Hypothesis_ library. - -.. _asyncio: https://docs.python.org/3/library/asyncio.html -.. _trio: https://github.com/python-trio/trio -.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency -.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning -.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs -.. _pytest: https://docs.pytest.org/en/latest/ -.. _Hypothesis: https://hypothesis.works/ diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/RECORD deleted file mode 100644 index 40eedc85..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/RECORD +++ /dev/null @@ -1,88 +0,0 @@ -anyio-4.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -anyio-4.9.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 -anyio-4.9.0.dist-info/METADATA,sha256=vvkWPXXTbrpTCFK7zdcYwQcSQhx6Q4qITM9t_PEQCrY,4682 -anyio-4.9.0.dist-info/RECORD,, -anyio-4.9.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91 -anyio-4.9.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 -anyio-4.9.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 -anyio/__init__.py,sha256=t8bZuNXa5ncwXBaNKbv48BDgZt48RT_zCEtrnPmjNU8,4993 -anyio/__pycache__/__init__.cpython-312.pyc,, -anyio/__pycache__/from_thread.cpython-312.pyc,, -anyio/__pycache__/lowlevel.cpython-312.pyc,, -anyio/__pycache__/pytest_plugin.cpython-312.pyc,, -anyio/__pycache__/to_interpreter.cpython-312.pyc,, -anyio/__pycache__/to_process.cpython-312.pyc,, -anyio/__pycache__/to_thread.cpython-312.pyc,, -anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_backends/__pycache__/__init__.cpython-312.pyc,, -anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,, -anyio/_backends/__pycache__/_trio.cpython-312.pyc,, -anyio/_backends/_asyncio.py,sha256=AT1oaTfCE-9YFxooMlvld2yDqY5U2A-ANMcBDh9eRfI,93455 -anyio/_backends/_trio.py,sha256=HVfDqRGQ7Xj3JfTcYdgzmC7pZEplqU4NOO5kxNNSZnk,40429 -anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_core/__pycache__/__init__.cpython-312.pyc,, -anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc,, -anyio/_core/__pycache__/_eventloop.cpython-312.pyc,, -anyio/_core/__pycache__/_exceptions.cpython-312.pyc,, -anyio/_core/__pycache__/_fileio.cpython-312.pyc,, 
-anyio/_core/__pycache__/_resources.cpython-312.pyc,, -anyio/_core/__pycache__/_signals.cpython-312.pyc,, -anyio/_core/__pycache__/_sockets.cpython-312.pyc,, -anyio/_core/__pycache__/_streams.cpython-312.pyc,, -anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,, -anyio/_core/__pycache__/_synchronization.cpython-312.pyc,, -anyio/_core/__pycache__/_tasks.cpython-312.pyc,, -anyio/_core/__pycache__/_tempfile.cpython-312.pyc,, -anyio/_core/__pycache__/_testing.cpython-312.pyc,, -anyio/_core/__pycache__/_typedattr.cpython-312.pyc,, -anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626 -anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695 -anyio/_core/_exceptions.py,sha256=RlPRlwastdmfDPoskdXNO6SI8_l3fclA2wtW6cokU9I,3503 -anyio/_core/_fileio.py,sha256=qFZhkLIz0cGXluvih_vcPUTucgq8UFVgsTCtYbijZIg,23340 -anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 -anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905 -anyio/_core/_sockets.py,sha256=5Okc_UThGDEN9KCnsIhqWPRHBNuSy6b4NmG1i51TVF4,27150 -anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804 -anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047 -anyio/_core/_synchronization.py,sha256=DwUh8Tl6cG_UMVC_GyzPoC_U9BpfDfjMl9SINSxcZN4,20320 -anyio/_core/_tasks.py,sha256=f3CuWwo06cCZ6jaOv-JHFKWkgpgf2cvaF25Oh4augMA,4757 -anyio/_core/_tempfile.py,sha256=s-_ucacXbxBH5Bo5eo65lN0lPwZQd5B8yNN_9nARpCM,19696 -anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 -anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 -anyio/abc/__init__.py,sha256=c2OQbTCS_fQowviMXanLPh8m29ccwkXmpDr7uyNZYOo,2652 -anyio/abc/__pycache__/__init__.cpython-312.pyc,, -anyio/abc/__pycache__/_eventloop.cpython-312.pyc,, -anyio/abc/__pycache__/_resources.cpython-312.pyc,, -anyio/abc/__pycache__/_sockets.cpython-312.pyc,, -anyio/abc/__pycache__/_streams.cpython-312.pyc,, -anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,, -anyio/abc/__pycache__/_tasks.cpython-312.pyc,, -anyio/abc/__pycache__/_testing.cpython-312.pyc,, -anyio/abc/_eventloop.py,sha256=UmL8DZCvQTgxzmyBZcGm9kWj9VQY8BMWueLh5S8yWN4,9682 -anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 -anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262 -anyio/abc/_streams.py,sha256=He_JpkAW2g5veOzcUq0XsRC2nId_i35L-d8cs7Uj1ZQ,6598 -anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 -anyio/abc/_tasks.py,sha256=yJWbMwowvqjlAX4oJ3l9Is1w-zwynr2lX1Z02AWJqsY,3080 -anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 -anyio/from_thread.py,sha256=MbXHZpgM9wgsRkbGhMNMomEGYj7Y_QYq6a5BZ3c5Ev8,17478 -anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169 -anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/pytest_plugin.py,sha256=qXNwk9Pa7hPQKWocgLl9qijqKGMkGzdH2wJa-jPkGUM,9375 -anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/streams/__pycache__/__init__.cpython-312.pyc,, -anyio/streams/__pycache__/buffered.cpython-312.pyc,, -anyio/streams/__pycache__/file.cpython-312.pyc,, -anyio/streams/__pycache__/memory.cpython-312.pyc,, -anyio/streams/__pycache__/stapled.cpython-312.pyc,, -anyio/streams/__pycache__/text.cpython-312.pyc,, -anyio/streams/__pycache__/tls.cpython-312.pyc,, 
-anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 -anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 -anyio/streams/memory.py,sha256=o1OVVx0OooteTTe2GytJreum93Ucuw5s4cAsr3X0-Ag,10560 -anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 -anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 -anyio/streams/tls.py,sha256=HxzpVmUgo8SUSIBass_lvef1pAI1uRSrnysM3iEGzl4,13199 -anyio/to_interpreter.py,sha256=UhuNCIucCRN7ZtyJg35Mlamzs1JpgDvK4xnL4TDWrAo,6527 -anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595 -anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/WHEEL deleted file mode 100644 index 9c3ae630..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (76.0.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/entry_points.txt deleted file mode 100644 index 44dd9bdc..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[pytest11] -anyio = anyio.pytest_plugin diff --git a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/top_level.txt deleted file mode 100644 index c77c069e..00000000 --- a/venv/lib/python3.12/site-packages/anyio-4.9.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -anyio diff --git a/venv/lib/python3.12/site-packages/anyio/__init__.py b/venv/lib/python3.12/site-packages/anyio/__init__.py deleted file mode 100644 index 578cda6f..00000000 --- a/venv/lib/python3.12/site-packages/anyio/__init__.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -from ._core._eventloop import current_time as current_time -from ._core._eventloop import get_all_backends as get_all_backends -from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class -from ._core._eventloop import run as run -from ._core._eventloop import sleep as sleep -from ._core._eventloop import sleep_forever as sleep_forever -from ._core._eventloop import sleep_until as sleep_until -from ._core._exceptions import BrokenResourceError as BrokenResourceError -from ._core._exceptions import BrokenWorkerIntepreter as BrokenWorkerIntepreter -from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess -from ._core._exceptions import BusyResourceError as BusyResourceError -from ._core._exceptions import ClosedResourceError as ClosedResourceError -from ._core._exceptions import DelimiterNotFound as DelimiterNotFound -from ._core._exceptions import EndOfStream as EndOfStream -from ._core._exceptions import IncompleteRead as IncompleteRead -from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError -from ._core._exceptions import WouldBlock as WouldBlock -from ._core._fileio import AsyncFile as AsyncFile -from ._core._fileio import Path as Path -from ._core._fileio import open_file as open_file -from ._core._fileio import wrap_file as wrap_file -from ._core._resources import aclose_forcefully as aclose_forcefully -from ._core._signals import open_signal_receiver as open_signal_receiver -from 
._core._sockets import connect_tcp as connect_tcp -from ._core._sockets import connect_unix as connect_unix -from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket -from ._core._sockets import ( - create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, -) -from ._core._sockets import create_tcp_listener as create_tcp_listener -from ._core._sockets import create_udp_socket as create_udp_socket -from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket -from ._core._sockets import create_unix_listener as create_unix_listener -from ._core._sockets import getaddrinfo as getaddrinfo -from ._core._sockets import getnameinfo as getnameinfo -from ._core._sockets import wait_readable as wait_readable -from ._core._sockets import wait_socket_readable as wait_socket_readable -from ._core._sockets import wait_socket_writable as wait_socket_writable -from ._core._sockets import wait_writable as wait_writable -from ._core._streams import create_memory_object_stream as create_memory_object_stream -from ._core._subprocesses import open_process as open_process -from ._core._subprocesses import run_process as run_process -from ._core._synchronization import CapacityLimiter as CapacityLimiter -from ._core._synchronization import ( - CapacityLimiterStatistics as CapacityLimiterStatistics, -) -from ._core._synchronization import Condition as Condition -from ._core._synchronization import ConditionStatistics as ConditionStatistics -from ._core._synchronization import Event as Event -from ._core._synchronization import EventStatistics as EventStatistics -from ._core._synchronization import Lock as Lock -from ._core._synchronization import LockStatistics as LockStatistics -from ._core._synchronization import ResourceGuard as ResourceGuard -from ._core._synchronization import Semaphore as Semaphore -from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics -from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED -from ._core._tasks import CancelScope as CancelScope -from ._core._tasks import create_task_group as create_task_group -from ._core._tasks import current_effective_deadline as current_effective_deadline -from ._core._tasks import fail_after as fail_after -from ._core._tasks import move_on_after as move_on_after -from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile -from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile -from ._core._tempfile import TemporaryDirectory as TemporaryDirectory -from ._core._tempfile import TemporaryFile as TemporaryFile -from ._core._tempfile import gettempdir as gettempdir -from ._core._tempfile import gettempdirb as gettempdirb -from ._core._tempfile import mkdtemp as mkdtemp -from ._core._tempfile import mkstemp as mkstemp -from ._core._testing import TaskInfo as TaskInfo -from ._core._testing import get_current_task as get_current_task -from ._core._testing import get_running_tasks as get_running_tasks -from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked -from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider -from ._core._typedattr import TypedAttributeSet as TypedAttributeSet -from ._core._typedattr import typed_attribute as typed_attribute - -# Re-export imports so they look like they live directly in this package -for __value in list(locals().values()): - if getattr(__value, "__module__", "").startswith("anyio."): - __value.__module__ = __name__ - -del __value diff 
--git a/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 90cd3b46..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc deleted file mode 100644 index 522865db..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc deleted file mode 100644 index 9c0cfa3a..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc deleted file mode 100644 index 75a3a121..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc deleted file mode 100644 index 7adfcdc7..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc deleted file mode 100644 index 5d305d80..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc deleted file mode 100644 index 7363ac01..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a2aba2b5..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc deleted file mode 100644 index 1954ed6f..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc deleted file mode 100644 index 2d247b78..00000000 Binary files 
a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py deleted file mode 100644 index ed91f404..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py +++ /dev/null @@ -1,2816 +0,0 @@ -from __future__ import annotations - -import array -import asyncio -import concurrent.futures -import contextvars -import math -import os -import socket -import sys -import threading -import weakref -from asyncio import ( - AbstractEventLoop, - CancelledError, - all_tasks, - create_task, - current_task, - get_running_loop, - sleep, -) -from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] -from collections import OrderedDict, deque -from collections.abc import ( - AsyncGenerator, - AsyncIterator, - Awaitable, - Callable, - Collection, - Coroutine, - Iterable, - Sequence, -) -from concurrent.futures import Future -from contextlib import AbstractContextManager, suppress -from contextvars import Context, copy_context -from dataclasses import dataclass -from functools import partial, wraps -from inspect import ( - CORO_RUNNING, - CORO_SUSPENDED, - getcoroutinestate, - iscoroutine, -) -from io import IOBase -from os import PathLike -from queue import Queue -from signal import Signals -from socket import AddressFamily, SocketKind -from threading import Thread -from types import CodeType, TracebackType -from typing import ( - IO, - TYPE_CHECKING, - Any, - Optional, - TypeVar, - cast, -) -from weakref import WeakKeyDictionary - -import sniffio - -from .. import ( - CapacityLimiterStatistics, - EventStatistics, - LockStatistics, - TaskInfo, - abc, -) -from .._core._eventloop import claim_worker_thread, threadlocals -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, - iterate_exceptions, -) -from .._core._sockets import convert_ipv6_sockaddr -from .._core._streams import create_memory_object_stream -from .._core._synchronization import ( - CapacityLimiter as BaseCapacityLimiter, -) -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import Lock as BaseLock -from .._core._synchronization import ( - ResourceGuard, - SemaphoreStatistics, -) -from .._core._synchronization import Semaphore as BaseSemaphore -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import ( - AsyncBackend, - IPSockAddrType, - SocketListener, - UDPPacketType, - UNIXDatagramPacketType, -) -from ..abc._eventloop import StrOrBytesPath -from ..lowlevel import RunVar -from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - -if TYPE_CHECKING: - from _typeshed import FileDescriptorLike -else: - FileDescriptorLike = object - -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - -if sys.version_info >= (3, 11): - from asyncio import Runner - from typing import TypeVarTuple, Unpack -else: - import contextvars - import enum - import signal - from asyncio import coroutines, events, exceptions, tasks - - from exceptiongroup import BaseExceptionGroup - from typing_extensions import TypeVarTuple, Unpack - - class _State(enum.Enum): - CREATED = "created" - INITIALIZED = "initialized" - CLOSED = "closed" - - class Runner: - # Copied from CPython 3.11 - def __init__( - self, - *, - debug: bool | None = None, - 
loop_factory: Callable[[], AbstractEventLoop] | None = None, - ): - self._state = _State.CREATED - self._debug = debug - self._loop_factory = loop_factory - self._loop: AbstractEventLoop | None = None - self._context = None - self._interrupt_count = 0 - self._set_event_loop = False - - def __enter__(self) -> Runner: - self._lazy_init() - return self - - def __exit__( - self, - exc_type: type[BaseException], - exc_val: BaseException, - exc_tb: TracebackType, - ) -> None: - self.close() - - def close(self) -> None: - """Shutdown and close event loop.""" - if self._state is not _State.INITIALIZED: - return - try: - loop = self._loop - _cancel_all_tasks(loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - if hasattr(loop, "shutdown_default_executor"): - loop.run_until_complete(loop.shutdown_default_executor()) - else: - loop.run_until_complete(_shutdown_default_executor(loop)) - finally: - if self._set_event_loop: - events.set_event_loop(None) - loop.close() - self._loop = None - self._state = _State.CLOSED - - def get_loop(self) -> AbstractEventLoop: - """Return embedded event loop.""" - self._lazy_init() - return self._loop - - def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: - """Run a coroutine inside the embedded event loop.""" - if not coroutines.iscoroutine(coro): - raise ValueError(f"a coroutine was expected, got {coro!r}") - - if events._get_running_loop() is not None: - # fail fast with short traceback - raise RuntimeError( - "Runner.run() cannot be called from a running event loop" - ) - - self._lazy_init() - - if context is None: - context = self._context - task = context.run(self._loop.create_task, coro) - - if ( - threading.current_thread() is threading.main_thread() - and signal.getsignal(signal.SIGINT) is signal.default_int_handler - ): - sigint_handler = partial(self._on_sigint, main_task=task) - try: - signal.signal(signal.SIGINT, sigint_handler) - except ValueError: - # `signal.signal` may throw if `threading.main_thread` does - # not support signals (e.g. 
embedded interpreter with signals - # not registered - see gh-91880) - sigint_handler = None - else: - sigint_handler = None - - self._interrupt_count = 0 - try: - return self._loop.run_until_complete(task) - except exceptions.CancelledError: - if self._interrupt_count > 0: - uncancel = getattr(task, "uncancel", None) - if uncancel is not None and uncancel() == 0: - raise KeyboardInterrupt() - raise # CancelledError - finally: - if ( - sigint_handler is not None - and signal.getsignal(signal.SIGINT) is sigint_handler - ): - signal.signal(signal.SIGINT, signal.default_int_handler) - - def _lazy_init(self) -> None: - if self._state is _State.CLOSED: - raise RuntimeError("Runner is closed") - if self._state is _State.INITIALIZED: - return - if self._loop_factory is None: - self._loop = events.new_event_loop() - if not self._set_event_loop: - # Call set_event_loop only once to avoid calling - # attach_loop multiple times on child watchers - events.set_event_loop(self._loop) - self._set_event_loop = True - else: - self._loop = self._loop_factory() - if self._debug is not None: - self._loop.set_debug(self._debug) - self._context = contextvars.copy_context() - self._state = _State.INITIALIZED - - def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: - self._interrupt_count += 1 - if self._interrupt_count == 1 and not main_task.done(): - main_task.cancel() - # wakeup loop if it is blocked by select() with long timeout - self._loop.call_soon_threadsafe(lambda: None) - return - raise KeyboardInterrupt() - - def _cancel_all_tasks(loop: AbstractEventLoop) -> None: - to_cancel = tasks.all_tasks(loop) - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler( - { - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - } - ) - - async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: - """Schedule the shutdown of the default executor.""" - - def _do_shutdown(future: asyncio.futures.Future) -> None: - try: - loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] - loop.call_soon_threadsafe(future.set_result, None) - except Exception as ex: - loop.call_soon_threadsafe(future.set_exception, ex) - - loop._executor_shutdown_called = True - if loop._default_executor is None: - return - future = loop.create_future() - thread = threading.Thread(target=_do_shutdown, args=(future,)) - thread.start() - try: - await future - finally: - thread.join() - - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) -PosArgsT = TypeVarTuple("PosArgsT") -P = ParamSpec("P") - -_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") - - -def find_root_task() -> asyncio.Task: - root_task = _root_task.get(None) - if root_task is not None and not root_task.done(): - return root_task - - # Look for a task that has been started via run_until_complete() - for task in all_tasks(): - if task._callbacks and not task.done(): - callbacks = [cb for cb, context in task._callbacks] - for cb in callbacks: - if ( - cb is _run_until_complete_cb - or getattr(cb, "__module__", None) == "uvloop.loop" - ): - _root_task.set(task) - return task - - # Look up the topmost task in the AnyIO task tree, if possible - task = cast(asyncio.Task, current_task()) - state = _task_states.get(task) - if 
state: - cancel_scope = state.cancel_scope - while cancel_scope and cancel_scope._parent_scope is not None: - cancel_scope = cancel_scope._parent_scope - - if cancel_scope is not None: - return cast(asyncio.Task, cancel_scope._host_task) - - return task - - -def get_callable_name(func: Callable) -> str: - module = getattr(func, "__module__", None) - qualname = getattr(func, "__qualname__", None) - return ".".join([x for x in (module, qualname) if x]) - - -# -# Event loop -# - -_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() - - -def _task_started(task: asyncio.Task) -> bool: - """Return ``True`` if the task has been started and has not finished.""" - # The task coro should never be None here, as we never add finished tasks to the - # task list - coro = task.get_coro() - assert coro is not None - try: - return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) - except AttributeError: - # task coro is async_genenerator_asend https://bugs.python.org/issue37771 - raise Exception(f"Cannot determine if task {task} has started or not") from None - - -# -# Timeouts and cancellation -# - - -def is_anyio_cancellation(exc: CancelledError) -> bool: - # Sometimes third party frameworks catch a CancelledError and raise a new one, so as - # a workaround we have to look at the previous ones in __context__ too for a - # matching cancel message - while True: - if ( - exc.args - and isinstance(exc.args[0], str) - and exc.args[0].startswith("Cancelled by cancel scope ") - ): - return True - - if isinstance(exc.__context__, CancelledError): - exc = exc.__context__ - continue - - return False - - -class CancelScope(BaseCancelScope): - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, deadline: float = math.inf, shield: bool = False): - self._deadline = deadline - self._shield = shield - self._parent_scope: CancelScope | None = None - self._child_scopes: set[CancelScope] = set() - self._cancel_called = False - self._cancelled_caught = False - self._active = False - self._timeout_handle: asyncio.TimerHandle | None = None - self._cancel_handle: asyncio.Handle | None = None - self._tasks: set[asyncio.Task] = set() - self._host_task: asyncio.Task | None = None - if sys.version_info >= (3, 11): - self._pending_uncancellations: int | None = 0 - else: - self._pending_uncancellations = None - - def __enter__(self) -> CancelScope: - if self._active: - raise RuntimeError( - "Each CancelScope may only be used for a single 'with' block" - ) - - self._host_task = host_task = cast(asyncio.Task, current_task()) - self._tasks.add(host_task) - try: - task_state = _task_states[host_task] - except KeyError: - task_state = TaskState(None, self) - _task_states[host_task] = task_state - else: - self._parent_scope = task_state.cancel_scope - task_state.cancel_scope = self - if self._parent_scope is not None: - # If using an eager task factory, the parent scope may not even contain - # the host task - self._parent_scope._child_scopes.add(self) - self._parent_scope._tasks.discard(host_task) - - self._timeout() - self._active = True - - # Start cancelling the host task if the scope was cancelled before entering - if self._cancel_called: - self._deliver_cancellation(self) - - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool: - del exc_tb - - if not self._active: - raise RuntimeError("This cancel 
scope is not active") - if current_task() is not self._host_task: - raise RuntimeError( - "Attempted to exit cancel scope in a different task than it was " - "entered in" - ) - - assert self._host_task is not None - host_task_state = _task_states.get(self._host_task) - if host_task_state is None or host_task_state.cancel_scope is not self: - raise RuntimeError( - "Attempted to exit a cancel scope that isn't the current tasks's " - "current cancel scope" - ) - - try: - self._active = False - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._tasks.remove(self._host_task) - if self._parent_scope is not None: - self._parent_scope._child_scopes.remove(self) - self._parent_scope._tasks.add(self._host_task) - - host_task_state.cancel_scope = self._parent_scope - - # Restart the cancellation effort in the closest visible, cancelled parent - # scope if necessary - self._restart_cancellation_in_parent() - - # We only swallow the exception iff it was an AnyIO CancelledError, either - # directly as exc_val or inside an exception group and there are no cancelled - # parent cancel scopes visible to us here - if self._cancel_called and not self._parent_cancellation_is_visible_to_us: - # For each level-cancel() call made on the host task, call uncancel() - while self._pending_uncancellations: - self._host_task.uncancel() - self._pending_uncancellations -= 1 - - # Update cancelled_caught and check for exceptions we must not swallow - cannot_swallow_exc_val = False - if exc_val is not None: - for exc in iterate_exceptions(exc_val): - if isinstance(exc, CancelledError) and is_anyio_cancellation( - exc - ): - self._cancelled_caught = True - else: - cannot_swallow_exc_val = True - - return self._cancelled_caught and not cannot_swallow_exc_val - else: - if self._pending_uncancellations: - assert self._parent_scope is not None - assert self._parent_scope._pending_uncancellations is not None - self._parent_scope._pending_uncancellations += ( - self._pending_uncancellations - ) - self._pending_uncancellations = 0 - - return False - finally: - self._host_task = None - del exc_val - - @property - def _effectively_cancelled(self) -> bool: - cancel_scope: CancelScope | None = self - while cancel_scope is not None: - if cancel_scope._cancel_called: - return True - - if cancel_scope.shield: - return False - - cancel_scope = cancel_scope._parent_scope - - return False - - @property - def _parent_cancellation_is_visible_to_us(self) -> bool: - return ( - self._parent_scope is not None - and not self.shield - and self._parent_scope._effectively_cancelled - ) - - def _timeout(self) -> None: - if self._deadline != math.inf: - loop = get_running_loop() - if loop.time() >= self._deadline: - self.cancel() - else: - self._timeout_handle = loop.call_at(self._deadline, self._timeout) - - def _deliver_cancellation(self, origin: CancelScope) -> bool: - """ - Deliver cancellation to directly contained tasks and nested cancel scopes. - - Schedule another run at the end if we still have tasks eligible for - cancellation. 
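The deadline and cancellation-delivery machinery above is what backs anyio's public timeout helpers; a small sketch of the observable behaviour (assumes anyio from PyPI):

    import anyio

    async def main() -> None:
        # move_on_after() enters a CancelScope with a deadline; when it
        # expires, the scope swallows its own cancellation and records it.
        with anyio.move_on_after(0.05) as scope:
            await anyio.sleep(1)
        print("timed out:", scope.cancelled_caught)  # True

    anyio.run(main)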
- - :param origin: the cancel scope that originated the cancellation - :return: ``True`` if the delivery needs to be retried on the next cycle - - """ - should_retry = False - current = current_task() - for task in self._tasks: - should_retry = True - if task._must_cancel: # type: ignore[attr-defined] - continue - - # The task is eligible for cancellation if it has started - if task is not current and (task is self._host_task or _task_started(task)): - waiter = task._fut_waiter # type: ignore[attr-defined] - if not isinstance(waiter, asyncio.Future) or not waiter.done(): - task.cancel(f"Cancelled by cancel scope {id(origin):x}") - if ( - task is origin._host_task - and origin._pending_uncancellations is not None - ): - origin._pending_uncancellations += 1 - - # Deliver cancellation to child scopes that aren't shielded or running their own - # cancellation callbacks - for scope in self._child_scopes: - if not scope._shield and not scope.cancel_called: - should_retry = scope._deliver_cancellation(origin) or should_retry - - # Schedule another callback if there are still tasks left - if origin is self: - if should_retry: - self._cancel_handle = get_running_loop().call_soon( - self._deliver_cancellation, origin - ) - else: - self._cancel_handle = None - - return should_retry - - def _restart_cancellation_in_parent(self) -> None: - """ - Restart the cancellation effort in the closest directly cancelled parent scope. - - """ - scope = self._parent_scope - while scope is not None: - if scope._cancel_called: - if scope._cancel_handle is None: - scope._deliver_cancellation(scope) - - break - - # No point in looking beyond any shielded scope - if scope._shield: - break - - scope = scope._parent_scope - - def cancel(self) -> None: - if not self._cancel_called: - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._cancel_called = True - if self._host_task is not None: - self._deliver_cancellation(self) - - @property - def deadline(self) -> float: - return self._deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self._deadline = float(value) - if self._timeout_handle is not None: - self._timeout_handle.cancel() - self._timeout_handle = None - - if self._active and not self._cancel_called: - self._timeout() - - @property - def cancel_called(self) -> bool: - return self._cancel_called - - @property - def cancelled_caught(self) -> bool: - return self._cancelled_caught - - @property - def shield(self) -> bool: - return self._shield - - @shield.setter - def shield(self, value: bool) -> None: - if self._shield != value: - self._shield = value - if not value: - self._restart_cancellation_in_parent() - - -# -# Task states -# - - -class TaskState: - """ - Encapsulates auxiliary task information that cannot be added to the Task instance - itself because there are no guarantees about its implementation. 
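The shield flag handled above suppresses cancellation delivery from outer scopes; a sketch of the effect (assumes anyio from PyPI):

    import anyio

    async def main() -> None:
        with anyio.move_on_after(0.05):
            with anyio.CancelScope(shield=True):
                await anyio.sleep(0.2)  # completes despite the outer deadline
            await anyio.sleep(1)        # cancelled once the shield is gone
        print("outer scope exited")

    anyio.run(main)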
- """ - - __slots__ = "parent_id", "cancel_scope", "__weakref__" - - def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): - self.parent_id = parent_id - self.cancel_scope = cancel_scope - - -_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() - - -# -# Task groups -# - - -class _AsyncioTaskStatus(abc.TaskStatus): - def __init__(self, future: asyncio.Future, parent_id: int): - self._future = future - self._parent_id = parent_id - - def started(self, value: T_contra | None = None) -> None: - try: - self._future.set_result(value) - except asyncio.InvalidStateError: - if not self._future.cancelled(): - raise RuntimeError( - "called 'started' twice on the same task status" - ) from None - - task = cast(asyncio.Task, current_task()) - _task_states[task].parent_id = self._parent_id - - -if sys.version_info >= (3, 12): - _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ -else: - _eager_task_factory_code = None - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self.cancel_scope: CancelScope = CancelScope() - self._active = False - self._exceptions: list[BaseException] = [] - self._tasks: set[asyncio.Task] = set() - self._on_completed_fut: asyncio.Future[None] | None = None - - async def __aenter__(self) -> TaskGroup: - self.cancel_scope.__enter__() - self._active = True - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - try: - if exc_val is not None: - self.cancel_scope.cancel() - if not isinstance(exc_val, CancelledError): - self._exceptions.append(exc_val) - - loop = get_running_loop() - try: - if self._tasks: - with CancelScope() as wait_scope: - while self._tasks: - self._on_completed_fut = loop.create_future() - - try: - await self._on_completed_fut - except CancelledError as exc: - # Shield the scope against further cancellation attempts, - # as they're not productive (#695) - wait_scope.shield = True - self.cancel_scope.cancel() - - # Set exc_val from the cancellation exception if it was - # previously unset. However, we should not replace a native - # cancellation exception with one raise by a cancel scope. - if exc_val is None or ( - isinstance(exc_val, CancelledError) - and not is_anyio_cancellation(exc) - ): - exc_val = exc - - self._on_completed_fut = None - else: - # If there are no child tasks to wait on, run at least one checkpoint - # anyway - await AsyncIOBackend.cancel_shielded_checkpoint() - - self._active = False - if self._exceptions: - # The exception that got us here should already have been - # added to self._exceptions so it's ok to break exception - # chaining and avoid adding a "During handling of above..." - # for each nesting level. 
- raise BaseExceptionGroup( - "unhandled errors in a TaskGroup", self._exceptions - ) from None - elif exc_val: - raise exc_val - except BaseException as exc: - if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): - return True - - raise - - return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) - finally: - del exc_val, exc_tb, self._exceptions - - def _spawn( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - args: tuple[Unpack[PosArgsT]], - name: object, - task_status_future: asyncio.Future | None = None, - ) -> asyncio.Task: - def task_done(_task: asyncio.Task) -> None: - task_state = _task_states[_task] - assert task_state.cancel_scope is not None - assert _task in task_state.cancel_scope._tasks - task_state.cancel_scope._tasks.remove(_task) - self._tasks.remove(task) - del _task_states[_task] - - if self._on_completed_fut is not None and not self._tasks: - try: - self._on_completed_fut.set_result(None) - except asyncio.InvalidStateError: - pass - - try: - exc = _task.exception() - except CancelledError as e: - while isinstance(e.__context__, CancelledError): - e = e.__context__ - - exc = e - - if exc is not None: - # The future can only be in the cancelled state if the host task was - # cancelled, so return immediately instead of adding one more - # CancelledError to the exceptions list - if task_status_future is not None and task_status_future.cancelled(): - return - - if task_status_future is None or task_status_future.done(): - if not isinstance(exc, CancelledError): - self._exceptions.append(exc) - - if not self.cancel_scope._effectively_cancelled: - self.cancel_scope.cancel() - else: - task_status_future.set_exception(exc) - elif task_status_future is not None and not task_status_future.done(): - task_status_future.set_exception( - RuntimeError("Child exited without calling task_status.started()") - ) - - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - kwargs = {} - if task_status_future: - parent_id = id(current_task()) - kwargs["task_status"] = _AsyncioTaskStatus( - task_status_future, id(self.cancel_scope._host_task) - ) - else: - parent_id = id(self.cancel_scope._host_task) - - coro = func(*args, **kwargs) - if not iscoroutine(coro): - prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" - raise TypeError( - f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " - f"the return value ({coro!r}) is not a coroutine object" - ) - - name = get_callable_name(func) if name is None else str(name) - loop = asyncio.get_running_loop() - if ( - (factory := loop.get_task_factory()) - and getattr(factory, "__code__", None) is _eager_task_factory_code - and (closure := getattr(factory, "__closure__", None)) - ): - custom_task_constructor = closure[0].cell_contents - task = custom_task_constructor(coro, loop=loop, name=name) - else: - task = create_task(coro, name=name) - - # Make the spawned task inherit the task group's cancel scope - _task_states[task] = TaskState( - parent_id=parent_id, cancel_scope=self.cancel_scope - ) - self.cancel_scope._tasks.add(task) - self._tasks.add(task) - task.add_done_callback(task_done) - return task - - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - self._spawn(func, args, name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> Any: - future: asyncio.Future = asyncio.Future() - task = self._spawn(func, args, name, future) - - # If the task raises an exception after sending a start value without a switch - # point between, the task group is cancelled and this method never proceeds to - # process the completed future. That's why we have to have a shielded cancel - # scope here. - try: - return await future - except CancelledError: - # Cancel the task and wait for it to exit before returning - task.cancel() - with CancelScope(shield=True), suppress(CancelledError): - await task - - raise - - -# -# Threads -# - -_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] - - -class WorkerThread(Thread): - MAX_IDLE_TIME = 10 # seconds - - def __init__( - self, - root_task: asyncio.Task, - workers: set[WorkerThread], - idle_workers: deque[WorkerThread], - ): - super().__init__(name="AnyIO worker thread") - self.root_task = root_task - self.workers = workers - self.idle_workers = idle_workers - self.loop = root_task._loop - self.queue: Queue[ - tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None - ] = Queue(2) - self.idle_since = AsyncIOBackend.current_time() - self.stopping = False - - def _report_result( - self, future: asyncio.Future, result: Any, exc: BaseException | None - ) -> None: - self.idle_since = AsyncIOBackend.current_time() - if not self.stopping: - self.idle_workers.append(self) - - if not future.cancelled(): - if exc is not None: - if isinstance(exc, StopIteration): - new_exc = RuntimeError("coroutine raised StopIteration") - new_exc.__cause__ = exc - exc = new_exc - - future.set_exception(exc) - else: - future.set_result(result) - - def run(self) -> None: - with claim_worker_thread(AsyncIOBackend, self.loop): - while True: - item = self.queue.get() - if item is None: - # Shutdown command received - return - - context, func, args, future, cancel_scope = item - if not future.cancelled(): - result = None - exception: BaseException | None = None - threadlocals.current_cancel_scope = cancel_scope - try: - result = context.run(func, *args) - except BaseException as exc: - exception = exc - finally: - del threadlocals.current_cancel_scope - - if not self.loop.is_closed(): - self.loop.call_soon_threadsafe( - self._report_result, future, result, exception - ) - - del result, exception - - self.queue.task_done() - del 
item, context, func, args, future, cancel_scope - - def stop(self, f: asyncio.Task | None = None) -> None: - self.stopping = True - self.queue.put_nowait(None) - self.workers.discard(self) - try: - self.idle_workers.remove(self) - except ValueError: - pass - - -_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( - "_threadpool_idle_workers" -) -_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._loop = get_running_loop() - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - AsyncIOBackend.run_sync_from_thread( - partial(self._task_group.start_soon, name=name), - (self._call_func, func, args, kwargs, future), - self._loop, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class StreamReaderWrapper(abc.ByteReceiveStream): - _stream: asyncio.StreamReader - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._stream.read(max_bytes) - if data: - return data - else: - raise EndOfStream - - async def aclose(self) -> None: - self._stream.set_exception(ClosedResourceError()) - await AsyncIOBackend.checkpoint() - - -@dataclass(eq=False) -class StreamWriterWrapper(abc.ByteSendStream): - _stream: asyncio.StreamWriter - - async def send(self, item: bytes) -> None: - self._stream.write(item) - await self._stream.drain() - - async def aclose(self) -> None: - self._stream.close() - await AsyncIOBackend.checkpoint() - - -@dataclass(eq=False) -class Process(abc.Process): - _process: asyncio.subprocess.Process - _stdin: StreamWriterWrapper | None - _stdout: StreamReaderWrapper | None - _stderr: StreamReaderWrapper | None - - async def aclose(self) -> None: - with CancelScope(shield=True) as scope: - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - scope.shield = False - try: - await self.wait() - except BaseException: - scope.shield = True - self.kill() - await self.wait() - raise - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: int) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -def _forcibly_shutdown_process_pool_on_exit( - workers: set[Process], _task: object -) -> None: - """ - Forcibly shuts down worker processes belonging to this event loop.""" - child_watcher: asyncio.AbstractChildWatcher | None = None - if sys.version_info < (3, 12): - try: - child_watcher = asyncio.get_event_loop_policy().get_child_watcher() - except NotImplementedError: - pass - - # Close as much as possible (w/o async/await) to avoid warnings - for process in workers: - if process.returncode is None: - continue - - 
process._stdin._stream._transport.close() # type: ignore[union-attr] - process._stdout._stream._transport.close() # type: ignore[union-attr] - process._stderr._stream._transport.close() # type: ignore[union-attr] - process.kill() - if child_watcher: - child_watcher.remove_child_handler(process.pid) - - -async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: - """ - Shuts down worker processes belonging to this event loop. - - NOTE: this only works when the event loop was started using asyncio.run() or - anyio.run(). - - """ - process: abc.Process - try: - await sleep(math.inf) - except asyncio.CancelledError: - for process in workers: - if process.returncode is None: - process.kill() - - for process in workers: - await process.aclose() - - -# -# Sockets and networking -# - - -class StreamProtocol(asyncio.Protocol): - read_queue: deque[bytes] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - is_at_eof: bool = False - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque() - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - cast(asyncio.Transport, transport).set_write_buffer_limits(0) - - def connection_lost(self, exc: Exception | None) -> None: - if exc: - self.exception = BrokenResourceError() - self.exception.__cause__ = exc - - self.read_event.set() - self.write_event.set() - - def data_received(self, data: bytes) -> None: - # ProactorEventloop sometimes sends bytearray instead of bytes - self.read_queue.append(bytes(data)) - self.read_event.set() - - def eof_received(self) -> bool | None: - self.is_at_eof = True - self.read_event.set() - return True - - def pause_writing(self) -> None: - self.write_event = asyncio.Event() - - def resume_writing(self) -> None: - self.write_event.set() - - -class DatagramProtocol(asyncio.DatagramProtocol): - read_queue: deque[tuple[bytes, IPSockAddrType]] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque(maxlen=100) # arbitrary value - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - - def connection_lost(self, exc: Exception | None) -> None: - self.read_event.set() - self.write_event.set() - - def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: - addr = convert_ipv6_sockaddr(addr) - self.read_queue.append((data, addr)) - self.read_event.set() - - def error_received(self, exc: Exception) -> None: - self.exception = exc - - def pause_writing(self) -> None: - self.write_event.clear() - - def resume_writing(self) -> None: - self.write_event.set() - - -class SocketStream(abc.SocketStream): - def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - if ( - not self._protocol.read_event.is_set() - and not self._transport.is_closing() - and not self._protocol.is_at_eof - ): - self._transport.resume_reading() - await self._protocol.read_event.wait() - self._transport.pause_reading() - else: - await 
AsyncIOBackend.checkpoint() - - try: - chunk = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - elif self._protocol.exception: - raise self._protocol.exception from None - else: - raise EndOfStream from None - - if len(chunk) > max_bytes: - # Split the oversized chunk - chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] - self._protocol.read_queue.appendleft(leftover) - - # If the read queue is empty, clear the flag so that the next call will - # block until data is available - if not self._protocol.read_queue: - self._protocol.read_event.clear() - - return chunk - - async def send(self, item: bytes) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - - if self._closed: - raise ClosedResourceError - elif self._protocol.exception is not None: - raise self._protocol.exception - - try: - self._transport.write(item) - except RuntimeError as exc: - if self._transport.is_closing(): - raise BrokenResourceError from exc - else: - raise - - await self._protocol.write_event.wait() - - async def send_eof(self) -> None: - try: - self._transport.write_eof() - except OSError: - pass - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - try: - self._transport.write_eof() - except OSError: - pass - - self._transport.close() - await sleep(0) - self._transport.abort() - - -class _RawSocketMixin: - _receive_future: asyncio.Future | None = None - _send_future: asyncio.Future | None = None - _closing = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._receive_future - loop.remove_reader(self.__raw_socket) - - f = self._receive_future = asyncio.Future() - loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._send_future - loop.remove_writer(self.__raw_socket) - - f = self._send_future = asyncio.Future() - loop.add_writer(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - async def aclose(self) -> None: - if not self._closing: - self._closing = True - if self.__raw_socket.fileno() != -1: - self.__raw_socket.close() - - if self._receive_future: - self._receive_future.set_result(None) - if self._send_future: - self._send_future.set_result(None) - - -class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): - async def send_eof(self) -> None: - with self._send_guard: - self._raw_socket.shutdown(socket.SHUT_WR) - - async def receive(self, max_bytes: int = 65536) -> bytes: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recv(max_bytes) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - view = 
memoryview(item) - while view: - try: - bytes_sent = self._raw_socket.send(view) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - view = view[bytes_sent:] - - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - loop = get_running_loop() - fds = array.array("i") - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - message, ancdata, flags, addr = self._raw_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - loop = get_running_loop() - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - # The ignore can be removed after mypy picks up - # https://github.com/python/typeshed/pull/5545 - self._raw_socket.sendmsg( - [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] - ) - break - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - -class TCPSocketListener(abc.SocketListener): - _accept_scope: CancelScope | None = None - _closed = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) - self._accept_guard = ResourceGuard("accepting connections from") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - async def accept(self) -> abc.SocketStream: - if self._closed: - raise ClosedResourceError - - with self._accept_guard: - await AsyncIOBackend.checkpoint() - with CancelScope() as self._accept_scope: - try: - client_sock, _addr = await self._loop.sock_accept(self._raw_socket) - except asyncio.CancelledError: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - if self._closed: - raise ClosedResourceError from None - - raise - finally: - self._accept_scope = None - - client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - transport, protocol = await self._loop.connect_accepted_socket( - 
StreamProtocol, client_sock - ) - return SocketStream(transport, protocol) - - async def aclose(self) -> None: - if self._closed: - return - - self._closed = True - if self._accept_scope: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - self._accept_scope.cancel() - await sleep(0) - - self._raw_socket.close() - - -class UNIXSocketListener(abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = get_running_loop() - self._accept_guard = ResourceGuard("accepting connections from") - self._closed = False - - async def accept(self) -> abc.SocketStream: - await AsyncIOBackend.checkpoint() - with self._accept_guard: - while True: - try: - client_sock, _ = self.__raw_socket.accept() - client_sock.setblocking(False) - return UNIXSocketStream(client_sock) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - self._loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback( - lambda _: self._loop.remove_reader(self.__raw_socket) - ) - await f - except OSError as exc: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - async def aclose(self) -> None: - self._closed = True - self.__raw_socket.close() - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - -class UDPSocket(abc.UDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - await AsyncIOBackend.checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - return self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - await self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(*item) - - -class ConnectedUDPSocket(abc.ConnectedUDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> bytes: - with self._receive_guard: - await AsyncIOBackend.checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and 
not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - packet = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - return packet[0] - - async def send(self, item: bytes) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - await self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(item) - - -class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): - async def receive(self) -> UNIXDatagramPacketType: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recvfrom(65536) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return data - - async def send(self, item: UNIXDatagramPacketType) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - self._raw_socket.sendto(*item) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return - - -class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): - async def receive(self) -> bytes: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recv(65536) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return data - - async def send(self, item: bytes) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - self._raw_socket.send(item) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return - - -_read_events: RunVar[dict[int, asyncio.Event]] = RunVar("read_events") -_write_events: RunVar[dict[int, asyncio.Event]] = RunVar("write_events") - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self._event = asyncio.Event() - - def set(self) -> None: - self._event.set() - - def is_set(self) -> bool: - return self._event.is_set() - - async def wait(self) -> None: - if self.is_set(): - await AsyncIOBackend.checkpoint() - else: - await self._event.wait() - - def statistics(self) -> EventStatistics: - return EventStatistics(len(self._event._waiters)) - - -class Lock(BaseLock): - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False) -> None: - self._fast_acquire = fast_acquire - self._owner_task: asyncio.Task | None = None - self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() - - async def acquire(self) -> None: - task = cast(asyncio.Task, current_task()) - if self._owner_task is None and not self._waiters: - await 
AsyncIOBackend.checkpoint_if_cancelled() - self._owner_task = task - - # Unless on the "fast path", yield control of the event loop so that other - # tasks can run too - if not self._fast_acquire: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except CancelledError: - self.release() - raise - - return - - if self._owner_task == task: - raise RuntimeError("Attempted to acquire an already held Lock") - - fut: asyncio.Future[None] = asyncio.Future() - item = task, fut - self._waiters.append(item) - try: - await fut - except CancelledError: - self._waiters.remove(item) - if self._owner_task is task: - self.release() - - raise - - self._waiters.remove(item) - - def acquire_nowait(self) -> None: - task = cast(asyncio.Task, current_task()) - if self._owner_task is None and not self._waiters: - self._owner_task = task - return - - if self._owner_task is task: - raise RuntimeError("Attempted to acquire an already held Lock") - - raise WouldBlock - - def locked(self) -> bool: - return self._owner_task is not None - - def release(self) -> None: - if self._owner_task != current_task(): - raise RuntimeError("The current task is not holding this lock") - - for task, fut in self._waiters: - if not fut.cancelled(): - self._owner_task = task - fut.set_result(None) - return - - self._owner_task = None - - def statistics(self) -> LockStatistics: - task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None - return LockStatistics(self.locked(), task_info, len(self._waiters)) - - -class Semaphore(BaseSemaphore): - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ): - super().__init__(initial_value, max_value=max_value) - self._value = initial_value - self._max_value = max_value - self._fast_acquire = fast_acquire - self._waiters: deque[asyncio.Future[None]] = deque() - - async def acquire(self) -> None: - if self._value > 0 and not self._waiters: - await AsyncIOBackend.checkpoint_if_cancelled() - self._value -= 1 - - # Unless on the "fast path", yield control of the event loop so that other - # tasks can run too - if not self._fast_acquire: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except CancelledError: - self.release() - raise - - return - - fut: asyncio.Future[None] = asyncio.Future() - self._waiters.append(fut) - try: - await fut - except CancelledError: - try: - self._waiters.remove(fut) - except ValueError: - self.release() - - raise - - def acquire_nowait(self) -> None: - if self._value == 0: - raise WouldBlock - - self._value -= 1 - - def release(self) -> None: - if self._max_value is not None and self._value == self._max_value: - raise ValueError("semaphore released too many times") - - for fut in self._waiters: - if not fut.cancelled(): - fut.set_result(None) - self._waiters.remove(fut) - return - - self._value += 1 - - @property - def value(self) -> int: - return self._value - - @property - def max_value(self) -> int | None: - return self._max_value - - def statistics(self) -> SemaphoreStatistics: - return SemaphoreStatistics(len(self._waiters)) - - -class CapacityLimiter(BaseCapacityLimiter): - _total_tokens: float = 0 - - def __new__(cls, total_tokens: float) -> CapacityLimiter: - return object.__new__(cls) - - def __init__(self, total_tokens: float): - self._borrowers: set[Any] = set() - self._wait_queue: OrderedDict[Any, 
asyncio.Event] = OrderedDict() - self.total_tokens = total_tokens - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - @property - def total_tokens(self) -> float: - return self._total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - if not isinstance(value, int) and not math.isinf(value): - raise TypeError("total_tokens must be an int or math.inf") - if value < 1: - raise ValueError("total_tokens must be >= 1") - - waiters_to_notify = max(value - self._total_tokens, 0) - self._total_tokens = value - - # Notify waiting tasks that they have acquired the limiter - while self._wait_queue and waiters_to_notify: - event = self._wait_queue.popitem(last=False)[1] - event.set() - waiters_to_notify -= 1 - - @property - def borrowed_tokens(self) -> int: - return len(self._borrowers) - - @property - def available_tokens(self) -> float: - return self._total_tokens - len(self._borrowers) - - def acquire_nowait(self) -> None: - self.acquire_on_behalf_of_nowait(current_task()) - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - if borrower in self._borrowers: - raise RuntimeError( - "this borrower is already holding one of this CapacityLimiter's tokens" - ) - - if self._wait_queue or len(self._borrowers) >= self._total_tokens: - raise WouldBlock - - self._borrowers.add(borrower) - - async def acquire(self) -> None: - return await self.acquire_on_behalf_of(current_task()) - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await AsyncIOBackend.checkpoint_if_cancelled() - try: - self.acquire_on_behalf_of_nowait(borrower) - except WouldBlock: - event = asyncio.Event() - self._wait_queue[borrower] = event - try: - await event.wait() - except BaseException: - self._wait_queue.pop(borrower, None) - raise - - self._borrowers.add(borrower) - else: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except BaseException: - self.release() - raise - - def release(self) -> None: - self.release_on_behalf_of(current_task()) - - def release_on_behalf_of(self, borrower: object) -> None: - try: - self._borrowers.remove(borrower) - except KeyError: - raise RuntimeError( - "this borrower isn't holding any of this CapacityLimiter's tokens" - ) from None - - # Notify the next task in line if this limiter has free capacity now - if self._wait_queue and len(self._borrowers) < self._total_tokens: - event = self._wait_queue.popitem(last=False)[1] - event.set() - - def statistics(self) -> CapacityLimiterStatistics: - return CapacityLimiterStatistics( - self.borrowed_tokens, - self.total_tokens, - tuple(self._borrowers), - len(self._wait_queue), - ) - - -_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") - - -# -# Operating system signals -# - - -class _SignalReceiver: - def __init__(self, signals: tuple[Signals, ...]): - self._signals = signals - self._loop = get_running_loop() - self._signal_queue: deque[Signals] = deque() - self._future: asyncio.Future = asyncio.Future() - self._handled_signals: set[Signals] = set() - - def _deliver(self, signum: Signals) -> None: - self._signal_queue.append(signum) - if not self._future.done(): - self._future.set_result(None) - - def __enter__(self) -> _SignalReceiver: - for sig in set(self._signals): - self._loop.add_signal_handler(sig, self._deliver, sig) - self._handled_signals.add(sig) - - return 
self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - for sig in self._handled_signals: - self._loop.remove_signal_handler(sig) - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> Signals: - await AsyncIOBackend.checkpoint() - if not self._signal_queue: - self._future = asyncio.Future() - await self._future - - return self._signal_queue.popleft() - - -# -# Testing and debugging -# - - -class AsyncIOTaskInfo(TaskInfo): - def __init__(self, task: asyncio.Task): - task_state = _task_states.get(task) - if task_state is None: - parent_id = None - else: - parent_id = task_state.parent_id - - coro = task.get_coro() - assert coro is not None, "created TaskInfo from a completed Task" - super().__init__(id(task), parent_id, task.get_name(), coro) - self._task = weakref.ref(task) - - def has_pending_cancellation(self) -> bool: - if not (task := self._task()): - # If the task isn't around anymore, it won't have a pending cancellation - return False - - if task._must_cancel: # type: ignore[attr-defined] - return True - elif ( - isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] - and task._fut_waiter.cancelled() # type: ignore[attr-defined] - ): - return True - - if task_state := _task_states.get(task): - if cancel_scope := task_state.cancel_scope: - return cancel_scope._effectively_cancelled - - return False - - -class TestRunner(abc.TestRunner): - _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] - - def __init__( - self, - *, - debug: bool | None = None, - use_uvloop: bool = False, - loop_factory: Callable[[], AbstractEventLoop] | None = None, - ) -> None: - if use_uvloop and loop_factory is None: - import uvloop - - loop_factory = uvloop.new_event_loop - - self._runner = Runner(debug=debug, loop_factory=loop_factory) - self._exceptions: list[BaseException] = [] - self._runner_task: asyncio.Task | None = None - - def __enter__(self) -> TestRunner: - self._runner.__enter__() - self.get_loop().set_exception_handler(self._exception_handler) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self._runner.__exit__(exc_type, exc_val, exc_tb) - - def get_loop(self) -> AbstractEventLoop: - return self._runner.get_loop() - - def _exception_handler( - self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] - ) -> None: - if isinstance(context.get("exception"), Exception): - self._exceptions.append(context["exception"]) - else: - loop.default_exception_handler(context) - - def _raise_async_exceptions(self) -> None: - # Re-raise any exceptions raised in asynchronous callbacks - if self._exceptions: - exceptions, self._exceptions = self._exceptions, [] - if len(exceptions) == 1: - raise exceptions[0] - elif exceptions: - raise BaseExceptionGroup( - "Multiple exceptions occurred in asynchronous callbacks", exceptions - ) - - async def _run_tests_and_fixtures( - self, - receive_stream: MemoryObjectReceiveStream[ - tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] - ], - ) -> None: - from _pytest.outcomes import OutcomeException - - with receive_stream, self._send_stream: - async for coro, future in receive_stream: - try: - retval = await coro - except CancelledError as exc: - if not future.cancelled(): - future.cancel(*exc.args) - - raise - except BaseException as exc: - if not future.cancelled(): - 
future.set_exception(exc) - - if not isinstance(exc, (Exception, OutcomeException)): - raise - else: - if not future.cancelled(): - future.set_result(retval) - - async def _call_in_runner_task( - self, - func: Callable[P, Awaitable[T_Retval]], - *args: P.args, - **kwargs: P.kwargs, - ) -> T_Retval: - if not self._runner_task: - self._send_stream, receive_stream = create_memory_object_stream[ - tuple[Awaitable[Any], asyncio.Future] - ](1) - self._runner_task = self.get_loop().create_task( - self._run_tests_and_fixtures(receive_stream) - ) - - coro = func(*args, **kwargs) - future: asyncio.Future[T_Retval] = self.get_loop().create_future() - self._send_stream.send_nowait((coro, future)) - return await future - - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - asyncgen = fixture_func(**kwargs) - fixturevalue: T_Retval = self.get_loop().run_until_complete( - self._call_in_runner_task(asyncgen.asend, None) - ) - self._raise_async_exceptions() - - yield fixturevalue - - try: - self.get_loop().run_until_complete( - self._call_in_runner_task(asyncgen.asend, None) - ) - except StopAsyncIteration: - self._raise_async_exceptions() - else: - self.get_loop().run_until_complete(asyncgen.aclose()) - raise RuntimeError("Async generator fixture did not stop") - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - retval = self.get_loop().run_until_complete( - self._call_in_runner_task(fixture_func, **kwargs) - ) - self._raise_async_exceptions() - return retval - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - try: - self.get_loop().run_until_complete( - self._call_in_runner_task(test_func, **kwargs) - ) - except Exception as exc: - self._exceptions.append(exc) - - self._raise_async_exceptions() - - -class AsyncIOBackend(AsyncBackend): - @classmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - @wraps(func) - async def wrapper() -> T_Retval: - task = cast(asyncio.Task, current_task()) - task.set_name(get_callable_name(func)) - _task_states[task] = TaskState(None, None) - - try: - return await func(*args) - finally: - del _task_states[task] - - debug = options.get("debug", None) - loop_factory = options.get("loop_factory", None) - if loop_factory is None and options.get("use_uvloop", False): - import uvloop - - loop_factory = uvloop.new_event_loop - - with Runner(debug=debug, loop_factory=loop_factory) as runner: - return runner.run(wrapper()) - - @classmethod - def current_token(cls) -> object: - return get_running_loop() - - @classmethod - def current_time(cls) -> float: - return get_running_loop().time() - - @classmethod - def cancelled_exception_class(cls) -> type[BaseException]: - return CancelledError - - @classmethod - async def checkpoint(cls) -> None: - await sleep(0) - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - task = current_task() - if task is None: - return - - try: - cancel_scope = _task_states[task].cancel_scope - except KeyError: - return - - while cancel_scope: - if cancel_scope.cancel_called: - await sleep(0) - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - with 
CancelScope(shield=True): - await sleep(0) - - @classmethod - async def sleep(cls, delay: float) -> None: - await sleep(delay) - - @classmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return CancelScope(deadline=deadline, shield=shield) - - @classmethod - def current_effective_deadline(cls) -> float: - if (task := current_task()) is None: - return math.inf - - try: - cancel_scope = _task_states[task].cancel_scope - except KeyError: - return math.inf - - deadline = math.inf - while cancel_scope: - deadline = min(deadline, cancel_scope.deadline) - if cancel_scope._cancel_called: - deadline = -math.inf - break - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - return deadline - - @classmethod - def create_task_group(cls) -> abc.TaskGroup: - return TaskGroup() - - @classmethod - def create_event(cls) -> abc.Event: - return Event() - - @classmethod - def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: - return Lock(fast_acquire=fast_acquire) - - @classmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> abc.Semaphore: - return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) - - @classmethod - def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: - return CapacityLimiter(total_tokens) - - @classmethod - async def run_sync_in_worker_thread( # type: ignore[return] - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: abc.CapacityLimiter | None = None, - ) -> T_Retval: - await cls.checkpoint() - - # If this is the first run in this event loop thread, set up the necessary - # variables - try: - idle_workers = _threadpool_idle_workers.get() - workers = _threadpool_workers.get() - except LookupError: - idle_workers = deque() - workers = set() - _threadpool_idle_workers.set(idle_workers) - _threadpool_workers.set(workers) - - async with limiter or cls.current_default_thread_limiter(): - with CancelScope(shield=not abandon_on_cancel) as scope: - future = asyncio.Future[T_Retval]() - root_task = find_root_task() - if not idle_workers: - worker = WorkerThread(root_task, workers, idle_workers) - worker.start() - workers.add(worker) - root_task.add_done_callback( - worker.stop, context=contextvars.Context() - ) - else: - worker = idle_workers.pop() - - # Prune any other workers that have been idle for MAX_IDLE_TIME - # seconds or longer - now = cls.current_time() - while idle_workers: - if ( - now - idle_workers[0].idle_since - < WorkerThread.MAX_IDLE_TIME - ): - break - - expired_worker = idle_workers.popleft() - expired_worker.root_task.remove_done_callback( - expired_worker.stop - ) - expired_worker.stop() - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - if abandon_on_cancel or scope._parent_scope is None: - worker_scope = scope - else: - worker_scope = scope._parent_scope - - worker.queue.put_nowait((context, func, args, future, worker_scope)) - return await future - - @classmethod - def check_cancelled(cls) -> None: - scope: CancelScope | None = threadlocals.current_cancel_scope - while scope is not None: - if scope.cancel_called: - raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") - - if scope.shield: - return - - scope = scope._parent_scope - - @classmethod - def run_async_from_thread( - cls, - func: 
Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - async def task_wrapper(scope: CancelScope) -> T_Retval: - __tracebackhide__ = True - task = cast(asyncio.Task, current_task()) - _task_states[task] = TaskState(None, scope) - scope._tasks.add(task) - try: - return await func(*args) - except CancelledError as exc: - raise concurrent.futures.CancelledError(str(exc)) from None - finally: - scope._tasks.discard(task) - - loop = cast(AbstractEventLoop, token) - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "asyncio") - wrapper = task_wrapper(threadlocals.current_cancel_scope) - f: concurrent.futures.Future[T_Retval] = context.run( - asyncio.run_coroutine_threadsafe, wrapper, loop - ) - return f.result() - - @classmethod - def run_sync_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - @wraps(func) - def wrapper() -> None: - try: - sniffio.current_async_library_cvar.set("asyncio") - f.set_result(func(*args)) - except BaseException as exc: - f.set_exception(exc) - if not isinstance(exc, Exception): - raise - - f: concurrent.futures.Future[T_Retval] = Future() - loop = cast(AbstractEventLoop, token) - loop.call_soon_threadsafe(wrapper) - return f.result() - - @classmethod - def create_blocking_portal(cls) -> abc.BlockingPortal: - return BlockingPortal() - - @classmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - await cls.checkpoint() - if isinstance(command, PathLike): - command = os.fspath(command) - - if isinstance(command, (str, bytes)): - process = await asyncio.create_subprocess_shell( - command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - **kwargs, - ) - else: - process = await asyncio.create_subprocess_exec( - *command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - **kwargs, - ) - - stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None - stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None - stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - @classmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: - create_task( - _shutdown_process_pool_on_exit(workers), - name="AnyIO process pool shutdown task", - ) - find_root_task().add_done_callback( - partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] - ) - - @classmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> abc.SocketStream: - transport, protocol = cast( - tuple[asyncio.Transport, StreamProtocol], - await get_running_loop().create_connection( - StreamProtocol, host, port, local_addr=local_address - ), - ) - transport.pause_reading() - return SocketStream(transport, protocol) - - @classmethod - async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: - await cls.checkpoint() - loop = get_running_loop() - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) - while True: - try: - raw_socket.connect(path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: 
loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return UNIXSocketStream(raw_socket) - - @classmethod - def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: - return TCPSocketListener(sock) - - @classmethod - def create_unix_listener(cls, sock: socket.socket) -> SocketListener: - return UNIXSocketListener(sock) - - @classmethod - async def create_udp_socket( - cls, - family: AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - transport, protocol = await get_running_loop().create_datagram_endpoint( - DatagramProtocol, - local_addr=local_address, - remote_addr=remote_address, - family=family, - reuse_port=reuse_port, - ) - if protocol.exception: - transport.close() - raise protocol.exception - - if not remote_address: - return UDPSocket(transport, protocol) - else: - return ConnectedUDPSocket(transport, protocol) - - @classmethod - async def create_unix_datagram_socket( # type: ignore[override] - cls, raw_socket: socket.socket, remote_path: str | bytes | None - ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: - await cls.checkpoint() - loop = get_running_loop() - - if remote_path: - while True: - try: - raw_socket.connect(remote_path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return ConnectedUNIXDatagramSocket(raw_socket) - else: - return UNIXDatagramSocket(raw_socket) - - @classmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> Sequence[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], - ] - ]: - return await get_running_loop().getaddrinfo( - host, port, family=family, type=type, proto=proto, flags=flags - ) - - @classmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - return await get_running_loop().getnameinfo(sockaddr, flags) - - @classmethod - async def wait_readable(cls, obj: FileDescriptorLike) -> None: - await cls.checkpoint() - try: - read_events = _read_events.get() - except LookupError: - read_events = {} - _read_events.set(read_events) - - if not isinstance(obj, int): - obj = obj.fileno() - - if read_events.get(obj): - raise BusyResourceError("reading from") - - loop = get_running_loop() - event = asyncio.Event() - try: - loop.add_reader(obj, event.set) - except NotImplementedError: - from anyio._core._asyncio_selector_thread import get_selector - - selector = get_selector() - selector.add_reader(obj, event.set) - remove_reader = selector.remove_reader - else: - remove_reader = loop.remove_reader - - read_events[obj] = event - try: - await event.wait() - finally: - remove_reader(obj) - del read_events[obj] - - @classmethod - async def wait_writable(cls, obj: FileDescriptorLike) -> None: - await cls.checkpoint() - try: - write_events = _write_events.get() - except LookupError: - write_events = {} - _write_events.set(write_events) - - if not isinstance(obj, int): - obj = obj.fileno() - - if write_events.get(obj): - raise BusyResourceError("writing to") - - loop = get_running_loop() - event = asyncio.Event() - 
try: - loop.add_writer(obj, event.set) - except NotImplementedError: - from anyio._core._asyncio_selector_thread import get_selector - - selector = get_selector() - selector.add_writer(obj, event.set) - remove_writer = selector.remove_writer - else: - remove_writer = loop.remove_writer - - write_events[obj] = event - try: - await event.wait() - finally: - del write_events[obj] - remove_writer(obj) - - @classmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - try: - return _default_thread_limiter.get() - except LookupError: - limiter = CapacityLimiter(40) - _default_thread_limiter.set(limiter) - return limiter - - @classmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - return _SignalReceiver(signals) - - @classmethod - def get_current_task(cls) -> TaskInfo: - return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] - - @classmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] - - @classmethod - async def wait_all_tasks_blocked(cls) -> None: - await cls.checkpoint() - this_task = current_task() - while True: - for task in all_tasks(): - if task is this_task: - continue - - waiter = task._fut_waiter # type: ignore[attr-defined] - if waiter is None or waiter.done(): - await sleep(0.1) - break - else: - return - - @classmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - return TestRunner(**options) - - -backend_class = AsyncIOBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py deleted file mode 100644 index b80cc04f..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py +++ /dev/null @@ -1,1334 +0,0 @@ -from __future__ import annotations - -import array -import math -import os -import socket -import sys -import types -import weakref -from collections.abc import ( - AsyncGenerator, - AsyncIterator, - Awaitable, - Callable, - Collection, - Coroutine, - Iterable, - Sequence, -) -from concurrent.futures import Future -from contextlib import AbstractContextManager -from dataclasses import dataclass -from functools import partial -from io import IOBase -from os import PathLike -from signal import Signals -from socket import AddressFamily, SocketKind -from types import TracebackType -from typing import ( - IO, - TYPE_CHECKING, - Any, - Generic, - NoReturn, - TypeVar, - cast, - overload, -) - -import trio.from_thread -import trio.lowlevel -from outcome import Error, Outcome, Value -from trio.lowlevel import ( - current_root_task, - current_task, - wait_readable, - wait_writable, -) -from trio.socket import SocketType as TrioSocketType -from trio.to_thread import run_sync - -from .. 
import ( - CapacityLimiterStatistics, - EventStatistics, - LockStatistics, - TaskInfo, - WouldBlock, - abc, -) -from .._core._eventloop import claim_worker_thread -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, -) -from .._core._sockets import convert_ipv6_sockaddr -from .._core._streams import create_memory_object_stream -from .._core._synchronization import ( - CapacityLimiter as BaseCapacityLimiter, -) -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import Lock as BaseLock -from .._core._synchronization import ( - ResourceGuard, - SemaphoreStatistics, -) -from .._core._synchronization import Semaphore as BaseSemaphore -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType -from ..abc._eventloop import AsyncBackend, StrOrBytesPath -from ..streams.memory import MemoryObjectSendStream - -if TYPE_CHECKING: - from _typeshed import HasFileno - -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from exceptiongroup import BaseExceptionGroup - from typing_extensions import TypeVarTuple, Unpack - -T = TypeVar("T") -T_Retval = TypeVar("T_Retval") -T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) -PosArgsT = TypeVarTuple("PosArgsT") -P = ParamSpec("P") - - -# -# Event loop -# - -RunVar = trio.lowlevel.RunVar - - -# -# Timeouts and cancellation -# - - -class CancelScope(BaseCancelScope): - def __new__( - cls, original: trio.CancelScope | None = None, **kwargs: object - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: - self.__original = original or trio.CancelScope(**kwargs) - - def __enter__(self) -> CancelScope: - self.__original.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool: - return self.__original.__exit__(exc_type, exc_val, exc_tb) - - def cancel(self) -> None: - self.__original.cancel() - - @property - def deadline(self) -> float: - return self.__original.deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self.__original.deadline = value - - @property - def cancel_called(self) -> bool: - return self.__original.cancel_called - - @property - def cancelled_caught(self) -> bool: - return self.__original.cancelled_caught - - @property - def shield(self) -> bool: - return self.__original.shield - - @shield.setter - def shield(self, value: bool) -> None: - self.__original.shield = value - - -# -# Task groups -# - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self._active = False - self._nursery_manager = trio.open_nursery(strict_exception_groups=True) - self.cancel_scope = None # type: ignore[assignment] - - async def __aenter__(self) -> TaskGroup: - self._active = True - self._nursery = await self._nursery_manager.__aenter__() - self.cancel_scope = CancelScope(self._nursery.cancel_scope) - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool: - try: - # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type - return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: 
ignore[return-value] - except BaseExceptionGroup as exc: - if not exc.split(trio.Cancelled)[1]: - raise trio.Cancelled._create() from exc - - raise - finally: - del exc_val, exc_tb - self._active = False - - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - self._nursery.start_soon(func, *args, name=name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> Any: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - return await self._nursery.start(func, *args, name=name) - - -# -# Threads -# - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._token = trio.lowlevel.current_trio_token() - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - trio.from_thread.run_sync( - partial(self._task_group.start_soon, name=name), - self._call_func, - func, - args, - kwargs, - future, - trio_token=self._token, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class ReceiveStreamWrapper(abc.ByteReceiveStream): - _stream: trio.abc.ReceiveStream - - async def receive(self, max_bytes: int | None = None) -> bytes: - try: - data = await self._stream.receive_some(max_bytes) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - if data: - return data - else: - raise EndOfStream - - async def aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class SendStreamWrapper(abc.ByteSendStream): - _stream: trio.abc.SendStream - - async def send(self, item: bytes) -> None: - try: - await self._stream.send_all(item) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - async def aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class Process(abc.Process): - _process: trio.Process - _stdin: abc.ByteSendStream | None - _stdout: abc.ByteReceiveStream | None - _stderr: abc.ByteReceiveStream | None - - async def aclose(self) -> None: - with CancelScope(shield=True): - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - try: - await self.wait() - except BaseException: - self.kill() - with CancelScope(shield=True): - await self.wait() - raise - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: Signals) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return 
self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -class _ProcessPoolShutdownInstrument(trio.abc.Instrument): - def after_run(self) -> None: - super().after_run() - - -current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( - "current_default_worker_process_limiter" -) - - -async def _shutdown_process_pool(workers: set[abc.Process]) -> None: - try: - await trio.sleep(math.inf) - except trio.Cancelled: - for process in workers: - if process.returncode is None: - process.kill() - - with CancelScope(shield=True): - for process in workers: - await process.aclose() - - -# -# Sockets and networking -# - - -class _TrioSocketMixin(Generic[T_SockAddr]): - def __init__(self, trio_socket: TrioSocketType) -> None: - self._trio_socket = trio_socket - self._closed = False - - def _check_closed(self) -> None: - if self._closed: - raise ClosedResourceError - if self._trio_socket.fileno() < 0: - raise BrokenResourceError - - @property - def _raw_socket(self) -> socket.socket: - return self._trio_socket._sock # type: ignore[attr-defined] - - async def aclose(self) -> None: - if self._trio_socket.fileno() >= 0: - self._closed = True - self._trio_socket.close() - - def _convert_socket_error(self, exc: BaseException) -> NoReturn: - if isinstance(exc, trio.ClosedResourceError): - raise ClosedResourceError from exc - elif self._trio_socket.fileno() < 0 and self._closed: - raise ClosedResourceError from None - elif isinstance(exc, OSError): - raise BrokenResourceError from exc - else: - raise exc - - -class SocketStream(_TrioSocketMixin, abc.SocketStream): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - try: - data = await self._trio_socket.recv(max_bytes) - except BaseException as exc: - self._convert_socket_error(exc) - - if data: - return data - else: - raise EndOfStream - - async def send(self, item: bytes) -> None: - with self._send_guard: - view = memoryview(item) - while view: - try: - bytes_sent = await self._trio_socket.send(view) - except BaseException as exc: - self._convert_socket_error(exc) - - view = view[bytes_sent:] - - async def send_eof(self) -> None: - self._trio_socket.shutdown(socket.SHUT_WR) - - -class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - fds = array.array("i") - await trio.lowlevel.checkpoint() - with self._receive_guard: - while True: - try: - message, ancdata, flags, addr = await self._trio_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BaseException as exc: - self._convert_socket_error(exc) - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - 
async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await trio.lowlevel.checkpoint() - with self._send_guard: - while True: - try: - await self._trio_socket.sendmsg( - [message], - [ - ( - socket.SOL_SOCKET, - socket.SCM_RIGHTS, - fdarray, - ) - ], - ) - break - except BaseException as exc: - self._convert_socket_error(exc) - - -class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> SocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - self._convert_socket_error(exc) - - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SocketStream(trio_socket) - - -class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> UNIXSocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - self._convert_socket_error(exc) - - return UNIXSocketStream(trio_socket) - - -class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - try: - data, addr = await self._trio_socket.recvfrom(65536) - return data, convert_ipv6_sockaddr(addr) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - try: - await self._trio_socket.sendto(*item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> bytes: - with self._receive_guard: - try: - return await self._trio_socket.recv(65536) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: bytes) -> None: - with self._send_guard: - try: - await self._trio_socket.send(item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> UNIXDatagramPacketType: - with self._receive_guard: - try: - data, addr = await self._trio_socket.recvfrom(65536) - return data, addr - except BaseException as exc: - 
self._convert_socket_error(exc) - - async def send(self, item: UNIXDatagramPacketType) -> None: - with self._send_guard: - try: - await self._trio_socket.sendto(*item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class ConnectedUNIXDatagramSocket( - _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket -): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> bytes: - with self._receive_guard: - try: - return await self._trio_socket.recv(65536) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: bytes) -> None: - with self._send_guard: - try: - await self._trio_socket.send(item) - except BaseException as exc: - self._convert_socket_error(exc) - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self.__original = trio.Event() - - def is_set(self) -> bool: - return self.__original.is_set() - - async def wait(self) -> None: - return await self.__original.wait() - - def statistics(self) -> EventStatistics: - orig_statistics = self.__original.statistics() - return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) - - def set(self) -> None: - self.__original.set() - - -class Lock(BaseLock): - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False) -> None: - self._fast_acquire = fast_acquire - self.__original = trio.Lock() - - @staticmethod - def _convert_runtime_error_msg(exc: RuntimeError) -> None: - if exc.args == ("attempt to re-acquire an already held Lock",): - exc.args = ("Attempted to acquire an already held Lock",) - - async def acquire(self) -> None: - if not self._fast_acquire: - try: - await self.__original.acquire() - except RuntimeError as exc: - self._convert_runtime_error_msg(exc) - raise - - return - - # This is the "fast path" where we don't let other tasks run - await trio.lowlevel.checkpoint_if_cancelled() - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - await self.__original._lot.park() - except RuntimeError as exc: - self._convert_runtime_error_msg(exc) - raise - - def acquire_nowait(self) -> None: - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - raise WouldBlock from None - except RuntimeError as exc: - self._convert_runtime_error_msg(exc) - raise - - def locked(self) -> bool: - return self.__original.locked() - - def release(self) -> None: - self.__original.release() - - def statistics(self) -> LockStatistics: - orig_statistics = self.__original.statistics() - owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None - return LockStatistics( - orig_statistics.locked, owner, orig_statistics.tasks_waiting - ) - - -class Semaphore(BaseSemaphore): - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> None: - super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) - self.__original = trio.Semaphore(initial_value, max_value=max_value) - - async def acquire(self) -> None: - if not self._fast_acquire: - await self.__original.acquire() - return - - 
# This is the "fast path" where we don't let other tasks run - await trio.lowlevel.checkpoint_if_cancelled() - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - await self.__original._lot.park() - - def acquire_nowait(self) -> None: - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - raise WouldBlock from None - - @property - def max_value(self) -> int | None: - return self.__original.max_value - - @property - def value(self) -> int: - return self.__original.value - - def release(self) -> None: - self.__original.release() - - def statistics(self) -> SemaphoreStatistics: - orig_statistics = self.__original.statistics() - return SemaphoreStatistics(orig_statistics.tasks_waiting) - - -class CapacityLimiter(BaseCapacityLimiter): - def __new__( - cls, - total_tokens: float | None = None, - *, - original: trio.CapacityLimiter | None = None, - ) -> CapacityLimiter: - return object.__new__(cls) - - def __init__( - self, - total_tokens: float | None = None, - *, - original: trio.CapacityLimiter | None = None, - ) -> None: - if original is not None: - self.__original = original - else: - assert total_tokens is not None - self.__original = trio.CapacityLimiter(total_tokens) - - async def __aenter__(self) -> None: - return await self.__original.__aenter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.__original.__aexit__(exc_type, exc_val, exc_tb) - - @property - def total_tokens(self) -> float: - return self.__original.total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - self.__original.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - return self.__original.borrowed_tokens - - @property - def available_tokens(self) -> float: - return self.__original.available_tokens - - def acquire_nowait(self) -> None: - self.__original.acquire_nowait() - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - self.__original.acquire_on_behalf_of_nowait(borrower) - - async def acquire(self) -> None: - await self.__original.acquire() - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await self.__original.acquire_on_behalf_of(borrower) - - def release(self) -> None: - return self.__original.release() - - def release_on_behalf_of(self, borrower: object) -> None: - return self.__original.release_on_behalf_of(borrower) - - def statistics(self) -> CapacityLimiterStatistics: - orig = self.__original.statistics() - return CapacityLimiterStatistics( - borrowed_tokens=orig.borrowed_tokens, - total_tokens=orig.total_tokens, - borrowers=tuple(orig.borrowers), - tasks_waiting=orig.tasks_waiting, - ) - - -_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") - - -# -# Signal handling -# - - -class _SignalReceiver: - _iterator: AsyncIterator[int] - - def __init__(self, signals: tuple[Signals, ...]): - self._signals = signals - - def __enter__(self) -> _SignalReceiver: - self._cm = trio.open_signal_receiver(*self._signals) - self._iterator = self._cm.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return self._cm.__exit__(exc_type, exc_val, exc_tb) - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> Signals: - signum = await self._iterator.__anext__() - return Signals(signum) - - -# -# 
Testing and debugging -# - - -class TestRunner(abc.TestRunner): - def __init__(self, **options: Any) -> None: - from queue import Queue - - self._call_queue: Queue[Callable[[], object]] = Queue() - self._send_stream: MemoryObjectSendStream | None = None - self._options = options - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: types.TracebackType | None, - ) -> None: - if self._send_stream: - self._send_stream.close() - while self._send_stream is not None: - self._call_queue.get()() - - async def _run_tests_and_fixtures(self) -> None: - self._send_stream, receive_stream = create_memory_object_stream(1) - with receive_stream: - async for coro, outcome_holder in receive_stream: - try: - retval = await coro - except BaseException as exc: - outcome_holder.append(Error(exc)) - else: - outcome_holder.append(Value(retval)) - - def _main_task_finished(self, outcome: object) -> None: - self._send_stream = None - - def _call_in_runner_task( - self, - func: Callable[P, Awaitable[T_Retval]], - *args: P.args, - **kwargs: P.kwargs, - ) -> T_Retval: - if self._send_stream is None: - trio.lowlevel.start_guest_run( - self._run_tests_and_fixtures, - run_sync_soon_threadsafe=self._call_queue.put, - done_callback=self._main_task_finished, - **self._options, - ) - while self._send_stream is None: - self._call_queue.get()() - - outcome_holder: list[Outcome] = [] - self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) - while not outcome_holder: - self._call_queue.get()() - - return outcome_holder[0].unwrap() - - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - asyncgen = fixture_func(**kwargs) - fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) - - yield fixturevalue - - try: - self._call_in_runner_task(asyncgen.asend, None) - except StopAsyncIteration: - pass - else: - self._call_in_runner_task(asyncgen.aclose) - raise RuntimeError("Async generator fixture did not stop") - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - return self._call_in_runner_task(fixture_func, **kwargs) - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - self._call_in_runner_task(test_func, **kwargs) - - -class TrioTaskInfo(TaskInfo): - def __init__(self, task: trio.lowlevel.Task): - parent_id = None - if task.parent_nursery and task.parent_nursery.parent_task: - parent_id = id(task.parent_nursery.parent_task) - - super().__init__(id(task), parent_id, task.name, task.coro) - self._task = weakref.proxy(task) - - def has_pending_cancellation(self) -> bool: - try: - return self._task._cancel_status.effectively_cancelled - except ReferenceError: - # If the task is no longer around, it surely doesn't have a cancellation - # pending - return False - - -class TrioBackend(AsyncBackend): - @classmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - return trio.run(func, *args) - - @classmethod - def current_token(cls) -> object: - return trio.lowlevel.current_trio_token() - - @classmethod - def current_time(cls) -> float: - return trio.current_time() - - @classmethod - def cancelled_exception_class(cls) -> type[BaseException]: - return trio.Cancelled - - @classmethod - 
async def checkpoint(cls) -> None: - await trio.lowlevel.checkpoint() - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - await trio.lowlevel.checkpoint_if_cancelled() - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - await trio.lowlevel.cancel_shielded_checkpoint() - - @classmethod - async def sleep(cls, delay: float) -> None: - await trio.sleep(delay) - - @classmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> abc.CancelScope: - return CancelScope(deadline=deadline, shield=shield) - - @classmethod - def current_effective_deadline(cls) -> float: - return trio.current_effective_deadline() - - @classmethod - def create_task_group(cls) -> abc.TaskGroup: - return TaskGroup() - - @classmethod - def create_event(cls) -> abc.Event: - return Event() - - @classmethod - def create_lock(cls, *, fast_acquire: bool) -> Lock: - return Lock(fast_acquire=fast_acquire) - - @classmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> abc.Semaphore: - return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) - - @classmethod - def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: - return CapacityLimiter(total_tokens) - - @classmethod - async def run_sync_in_worker_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: abc.CapacityLimiter | None = None, - ) -> T_Retval: - def wrapper() -> T_Retval: - with claim_worker_thread(TrioBackend, token): - return func(*args) - - token = TrioBackend.current_token() - return await run_sync( - wrapper, - abandon_on_cancel=abandon_on_cancel, - limiter=cast(trio.CapacityLimiter, limiter), - ) - - @classmethod - def check_cancelled(cls) -> None: - trio.from_thread.check_cancelled() - - @classmethod - def run_async_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - return trio.from_thread.run(func, *args) - - @classmethod - def run_sync_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - return trio.from_thread.run_sync(func, *args) - - @classmethod - def create_blocking_portal(cls) -> abc.BlockingPortal: - return BlockingPortal() - - @classmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - def convert_item(item: StrOrBytesPath) -> str: - str_or_bytes = os.fspath(item) - if isinstance(str_or_bytes, str): - return str_or_bytes - else: - return os.fsdecode(str_or_bytes) - - if isinstance(command, (str, bytes, PathLike)): - process = await trio.lowlevel.open_process( - convert_item(command), - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=True, - **kwargs, - ) - else: - process = await trio.lowlevel.open_process( - [convert_item(item) for item in command], - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=False, - **kwargs, - ) - - stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None - stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None - stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, 
stdout_stream, stderr_stream) - - @classmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: - trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) - - @classmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> SocketStream: - family = socket.AF_INET6 if ":" in host else socket.AF_INET - trio_socket = trio.socket.socket(family) - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if local_address: - await trio_socket.bind(local_address) - - try: - await trio_socket.connect((host, port)) - except BaseException: - trio_socket.close() - raise - - return SocketStream(trio_socket) - - @classmethod - async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: - trio_socket = trio.socket.socket(socket.AF_UNIX) - try: - await trio_socket.connect(path) - except BaseException: - trio_socket.close() - raise - - return UNIXSocketStream(trio_socket) - - @classmethod - def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: - return TCPSocketListener(sock) - - @classmethod - def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: - return UNIXSocketListener(sock) - - @classmethod - async def create_udp_socket( - cls, - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) - - if reuse_port: - trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - if local_address: - await trio_socket.bind(local_address) - - if remote_address: - await trio_socket.connect(remote_address) - return ConnectedUDPSocket(trio_socket) - else: - return UDPSocket(trio_socket) - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: None - ) -> abc.UNIXDatagramSocket: ... - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: str | bytes - ) -> abc.ConnectedUNIXDatagramSocket: ... 
- - @classmethod - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: str | bytes | None - ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: - trio_socket = trio.socket.from_stdlib_socket(raw_socket) - - if remote_path: - await trio_socket.connect(remote_path) - return ConnectedUNIXDatagramSocket(trio_socket) - else: - return UNIXDatagramSocket(trio_socket) - - @classmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> Sequence[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], - ] - ]: - return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) - - @classmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - return await trio.socket.getnameinfo(sockaddr, flags) - - @classmethod - async def wait_readable(cls, obj: HasFileno | int) -> None: - try: - await wait_readable(obj) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("reading from") from None - - @classmethod - async def wait_writable(cls, obj: HasFileno | int) -> None: - try: - await wait_writable(obj) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("writing to") from None - - @classmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - try: - return _capacity_limiter_wrapper.get() - except LookupError: - limiter = CapacityLimiter( - original=trio.to_thread.current_default_thread_limiter() - ) - _capacity_limiter_wrapper.set(limiter) - return limiter - - @classmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - return _SignalReceiver(signals) - - @classmethod - def get_current_task(cls) -> TaskInfo: - task = current_task() - return TrioTaskInfo(task) - - @classmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - root_task = current_root_task() - assert root_task - task_infos = [TrioTaskInfo(root_task)] - nurseries = root_task.child_nurseries - while nurseries: - new_nurseries: list[trio.Nursery] = [] - for nursery in nurseries: - for task in nursery.child_tasks: - task_infos.append(TrioTaskInfo(task)) - new_nurseries.extend(task.child_nurseries) - - nurseries = new_nurseries - - return task_infos - - @classmethod - async def wait_all_tasks_blocked(cls) -> None: - from trio.testing import wait_all_tasks_blocked - - await wait_all_tasks_blocked() - - @classmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - return TestRunner(**options) - - -backend_class = TrioBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__init__.py b/venv/lib/python3.12/site-packages/anyio/_core/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index cedc6d20..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc deleted file mode 100644 index 3809692a..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc deleted file mode 100644 index 40158c10..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc deleted file mode 100644 index 54512d98..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc deleted file mode 100644 index f8ca6b3c..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc deleted file mode 100644 index e802ca7c..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc deleted file mode 100644 index df13c5c0..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc deleted file mode 100644 index e395c173..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc deleted file mode 100644 index 6ee4b75f..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc deleted file mode 100644 index 684dfc5f..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc deleted file mode 100644 index 30215bb8..00000000 Binary files 
a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc deleted file mode 100644 index c8ebe3f0..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc deleted file mode 100644 index 2e52d766..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc deleted file mode 100644 index 7aaf93fb..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc deleted file mode 100644 index 7ed35885..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py b/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py deleted file mode 100644 index 9f35bae5..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py +++ /dev/null @@ -1,167 +0,0 @@ -from __future__ import annotations - -import asyncio -import socket -import threading -from collections.abc import Callable -from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector -from typing import TYPE_CHECKING, Any - -if TYPE_CHECKING: - from _typeshed import FileDescriptorLike - -_selector_lock = threading.Lock() -_selector: Selector | None = None - - -class Selector: - def __init__(self) -> None: - self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") - self._selector = DefaultSelector() - self._send, self._receive = socket.socketpair() - self._send.setblocking(False) - self._receive.setblocking(False) - # This somewhat reduces the amount of memory wasted queueing up data - # for wakeups. With these settings, maximum number of 1-byte sends - # before getting BlockingIOError: - # Linux 4.8: 6 - # macOS (darwin 15.5): 1 - # Windows 10: 525347 - # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send - # blocking, even on non-blocking sockets, so don't do that.) - self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) - self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) - # On Windows this is a TCP socket so this might matter. On other - # platforms this fails b/c AF_UNIX sockets aren't actually TCP. 
- try: - self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - except OSError: - pass - - self._selector.register(self._receive, EVENT_READ) - self._closed = False - - def start(self) -> None: - self._thread.start() - threading._register_atexit(self._stop) # type: ignore[attr-defined] - - def _stop(self) -> None: - global _selector - self._closed = True - self._notify_self() - self._send.close() - self._thread.join() - self._selector.unregister(self._receive) - self._receive.close() - self._selector.close() - _selector = None - assert not self._selector.get_map(), ( - "selector still has registered file descriptors after shutdown" - ) - - def _notify_self(self) -> None: - try: - self._send.send(b"\x00") - except BlockingIOError: - pass - - def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: - loop = asyncio.get_running_loop() - try: - key = self._selector.get_key(fd) - except KeyError: - self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) - else: - if EVENT_READ in key.data: - raise ValueError( - "this file descriptor is already registered for reading" - ) - - key.data[EVENT_READ] = loop, callback - self._selector.modify(fd, key.events | EVENT_READ, key.data) - - self._notify_self() - - def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: - loop = asyncio.get_running_loop() - try: - key = self._selector.get_key(fd) - except KeyError: - self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) - else: - if EVENT_WRITE in key.data: - raise ValueError( - "this file descriptor is already registered for writing" - ) - - key.data[EVENT_WRITE] = loop, callback - self._selector.modify(fd, key.events | EVENT_WRITE, key.data) - - self._notify_self() - - def remove_reader(self, fd: FileDescriptorLike) -> bool: - try: - key = self._selector.get_key(fd) - except KeyError: - return False - - if new_events := key.events ^ EVENT_READ: - del key.data[EVENT_READ] - self._selector.modify(fd, new_events, key.data) - else: - self._selector.unregister(fd) - - return True - - def remove_writer(self, fd: FileDescriptorLike) -> bool: - try: - key = self._selector.get_key(fd) - except KeyError: - return False - - if new_events := key.events ^ EVENT_WRITE: - del key.data[EVENT_WRITE] - self._selector.modify(fd, new_events, key.data) - else: - self._selector.unregister(fd) - - return True - - def run(self) -> None: - while not self._closed: - for key, events in self._selector.select(): - if key.fileobj is self._receive: - try: - while self._receive.recv(4096): - pass - except BlockingIOError: - pass - - continue - - if events & EVENT_READ: - loop, callback = key.data[EVENT_READ] - self.remove_reader(key.fd) - try: - loop.call_soon_threadsafe(callback) - except RuntimeError: - pass # the loop was already closed - - if events & EVENT_WRITE: - loop, callback = key.data[EVENT_WRITE] - self.remove_writer(key.fd) - try: - loop.call_soon_threadsafe(callback) - except RuntimeError: - pass # the loop was already closed - - -def get_selector() -> Selector: - global _selector - - with _selector_lock: - if _selector is None: - _selector = Selector() - _selector.start() - - return _selector diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py deleted file mode 100644 index 6dcb4589..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py +++ /dev/null @@ -1,166 +0,0 @@ -from __future__ import annotations - -import math 
-import sys -import threading -from collections.abc import Awaitable, Callable, Generator -from contextlib import contextmanager -from importlib import import_module -from typing import TYPE_CHECKING, Any, TypeVar - -import sniffio - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if TYPE_CHECKING: - from ..abc import AsyncBackend - -# This must be updated when new backends are introduced -BACKENDS = "asyncio", "trio" - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - -threadlocals = threading.local() -loaded_backends: dict[str, type[AsyncBackend]] = {} - - -def run( - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - backend: str = "asyncio", - backend_options: dict[str, Any] | None = None, -) -> T_Retval: - """ - Run the given coroutine function in an asynchronous event loop. - - The current thread must not be already running an event loop. - - :param func: a coroutine function - :param args: positional arguments to ``func`` - :param backend: name of the asynchronous event loop implementation – currently - either ``asyncio`` or ``trio`` - :param backend_options: keyword arguments to call the backend ``run()`` - implementation with (documented :ref:`here `) - :return: the return value of the coroutine function - :raises RuntimeError: if an asynchronous event loop is already running in this - thread - :raises LookupError: if the named backend is not found - - """ - try: - asynclib_name = sniffio.current_async_library() - except sniffio.AsyncLibraryNotFoundError: - pass - else: - raise RuntimeError(f"Already running {asynclib_name} in this thread") - - try: - async_backend = get_async_backend(backend) - except ImportError as exc: - raise LookupError(f"No such backend: {backend}") from exc - - token = None - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async - # library - token = sniffio.current_async_library_cvar.set(backend) - - try: - backend_options = backend_options or {} - return async_backend.run(func, args, {}, backend_options) - finally: - if token: - sniffio.current_async_library_cvar.reset(token) - - -async def sleep(delay: float) -> None: - """ - Pause the current task for the specified duration. - - :param delay: the duration, in seconds - - """ - return await get_async_backend().sleep(delay) - - -async def sleep_forever() -> None: - """ - Pause the current task until it's cancelled. - - This is a shortcut for ``sleep(math.inf)``. - - .. versionadded:: 3.1 - - """ - await sleep(math.inf) - - -async def sleep_until(deadline: float) -> None: - """ - Pause the current task until the given time. - - :param deadline: the absolute time to wake up at (according to the internal - monotonic clock of the event loop) - - .. versionadded:: 3.1 - - """ - now = current_time() - await sleep(max(deadline - now, 0)) - - -def current_time() -> float: - """ - Return the current value of the event loop's internal clock. 
- - :return: the clock value (seconds) - - """ - return get_async_backend().current_time() - - -def get_all_backends() -> tuple[str, ...]: - """Return a tuple of the names of all built-in backends.""" - return BACKENDS - - -def get_cancelled_exc_class() -> type[BaseException]: - """Return the current async library's cancellation exception class.""" - return get_async_backend().cancelled_exception_class() - - -# -# Private API -# - - -@contextmanager -def claim_worker_thread( - backend_class: type[AsyncBackend], token: object -) -> Generator[Any, None, None]: - threadlocals.current_async_backend = backend_class - threadlocals.current_token = token - try: - yield - finally: - del threadlocals.current_async_backend - del threadlocals.current_token - - -def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: - if asynclib_name is None: - asynclib_name = sniffio.current_async_library() - - # We use our own dict instead of sys.modules to get the already imported back-end - # class because the appropriate modules in sys.modules could potentially be only - # partially initialized - try: - return loaded_backends[asynclib_name] - except KeyError: - module = import_module(f"anyio._backends._{asynclib_name}") - loaded_backends[asynclib_name] = module.backend_class - return module.backend_class diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py deleted file mode 100644 index 16b94482..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py +++ /dev/null @@ -1,126 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Generator -from textwrap import dedent -from typing import Any - -if sys.version_info < (3, 11): - from exceptiongroup import BaseExceptionGroup - - -class BrokenResourceError(Exception): - """ - Raised when trying to use a resource that has been rendered unusable due to external - causes (e.g. a send stream whose peer has disconnected). - """ - - -class BrokenWorkerProcess(Exception): - """ - Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or - otherwise misbehaves. - """ - - -class BrokenWorkerIntepreter(Exception): - """ - Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is - raised in the subinterpreter. - """ - - def __init__(self, excinfo: Any): - # This was adapted from concurrent.futures.interpreter.ExecutionFailed - msg = excinfo.formatted - if not msg: - if excinfo.type and excinfo.msg: - msg = f"{excinfo.type.__name__}: {excinfo.msg}" - else: - msg = excinfo.type.__name__ or excinfo.msg - - super().__init__(msg) - self.excinfo = excinfo - - def __str__(self) -> str: - try: - formatted = self.excinfo.errdisplay - except Exception: - return super().__str__() - else: - return dedent( - f""" - {super().__str__()} - - Uncaught in the interpreter: - - {formatted} - """.strip() - ) - - -class BusyResourceError(Exception): - """ - Raised when two tasks are trying to read from or write to the same resource - concurrently. - """ - - def __init__(self, action: str): - super().__init__(f"Another task is already {action} this resource") - - -class ClosedResourceError(Exception): - """Raised when trying to use a resource that has been closed.""" - - -class DelimiterNotFound(Exception): - """ - Raised during - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - maximum number of bytes has been read without the delimiter being found. 
- """ - - def __init__(self, max_bytes: int) -> None: - super().__init__( - f"The delimiter was not found among the first {max_bytes} bytes" - ) - - -class EndOfStream(Exception): - """ - Raised when trying to read from a stream that has been closed from the other end. - """ - - -class IncompleteRead(Exception): - """ - Raised during - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - connection is closed before the requested amount of bytes has been read. - """ - - def __init__(self) -> None: - super().__init__( - "The stream was closed before the read operation could be completed" - ) - - -class TypedAttributeLookupError(LookupError): - """ - Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute - is not found and no default value has been given. - """ - - -class WouldBlock(Exception): - """Raised by ``X_nowait`` functions if ``X()`` would block.""" - - -def iterate_exceptions( - exception: BaseException, -) -> Generator[BaseException, None, None]: - if isinstance(exception, BaseExceptionGroup): - for exc in exception.exceptions: - yield from iterate_exceptions(exc) - else: - yield exception diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py deleted file mode 100644 index a0d61984..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py +++ /dev/null @@ -1,742 +0,0 @@ -from __future__ import annotations - -import os -import pathlib -import sys -from collections.abc import ( - AsyncIterator, - Callable, - Iterable, - Iterator, - Sequence, -) -from dataclasses import dataclass -from functools import partial -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - AnyStr, - ClassVar, - Final, - Generic, - overload, -) - -from .. import to_thread -from ..abc import AsyncResource - -if TYPE_CHECKING: - from types import ModuleType - - from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer -else: - ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object - - -class AsyncFile(AsyncResource, Generic[AnyStr]): - """ - An asynchronous file object. - - This class wraps a standard file object and provides async friendly versions of the - following blocking methods (where available on the original file object): - - * read - * read1 - * readline - * readlines - * readinto - * readinto1 - * write - * writelines - * truncate - * seek - * tell - * flush - - All other methods are directly passed through. - - This class supports the asynchronous context manager protocol which closes the - underlying file at the end of the context block. - - This class also supports asynchronous iteration:: - - async with await open_file(...) 
as f: - async for line in f: - print(line) - """ - - def __init__(self, fp: IO[AnyStr]) -> None: - self._fp: Any = fp - - def __getattr__(self, name: str) -> object: - return getattr(self._fp, name) - - @property - def wrapped(self) -> IO[AnyStr]: - """The wrapped file object.""" - return self._fp - - async def __aiter__(self) -> AsyncIterator[AnyStr]: - while True: - line = await self.readline() - if line: - yield line - else: - break - - async def aclose(self) -> None: - return await to_thread.run_sync(self._fp.close) - - async def read(self, size: int = -1) -> AnyStr: - return await to_thread.run_sync(self._fp.read, size) - - async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: - return await to_thread.run_sync(self._fp.read1, size) - - async def readline(self) -> AnyStr: - return await to_thread.run_sync(self._fp.readline) - - async def readlines(self) -> list[AnyStr]: - return await to_thread.run_sync(self._fp.readlines) - - async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: - return await to_thread.run_sync(self._fp.readinto, b) - - async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: - return await to_thread.run_sync(self._fp.readinto1, b) - - @overload - async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... - - @overload - async def write(self: AsyncFile[str], b: str) -> int: ... - - async def write(self, b: ReadableBuffer | str) -> int: - return await to_thread.run_sync(self._fp.write, b) - - @overload - async def writelines( - self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] - ) -> None: ... - - @overload - async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... - - async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: - return await to_thread.run_sync(self._fp.writelines, lines) - - async def truncate(self, size: int | None = None) -> int: - return await to_thread.run_sync(self._fp.truncate, size) - - async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: - return await to_thread.run_sync(self._fp.seek, offset, whence) - - async def tell(self) -> int: - return await to_thread.run_sync(self._fp.tell) - - async def flush(self) -> None: - return await to_thread.run_sync(self._fp.flush) - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[bytes]: ... - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[str]: ... - - -async def open_file( - file: str | PathLike[str] | int, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - closefd: bool = True, - opener: Callable[[str, int], int] | None = None, -) -> AsyncFile[Any]: - """ - Open a file asynchronously. - - The arguments are exactly the same as for the builtin :func:`open`. 
- - :return: an asynchronous file object - - """ - fp = await to_thread.run_sync( - open, file, mode, buffering, encoding, errors, newline, closefd, opener - ) - return AsyncFile(fp) - - -def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: - """ - Wrap an existing file as an asynchronous file. - - :param file: an existing file-like object - :return: an asynchronous file object - - """ - return AsyncFile(file) - - -@dataclass(eq=False) -class _PathIterator(AsyncIterator["Path"]): - iterator: Iterator[PathLike[str]] - - async def __anext__(self) -> Path: - nextval = await to_thread.run_sync( - next, self.iterator, None, abandon_on_cancel=True - ) - if nextval is None: - raise StopAsyncIteration from None - - return Path(nextval) - - -class Path: - """ - An asynchronous version of :class:`pathlib.Path`. - - This class cannot be substituted for :class:`pathlib.Path` or - :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` - interface. - - It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for - the deprecated :meth:`~pathlib.Path.link_to` method. - - Some methods may be unavailable or have limited functionality, based on the Python - version: - - * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) - * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) - * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) - * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later) - * :attr:`~pathlib.Path.info` (available on Python 3.14 or later) - * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) - * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only - available on Python 3.13 or later) - * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) - * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) - * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available - on Python 3.12 or later) - * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) - - Any methods that do disk I/O need to be awaited on. 
These methods are: - - * :meth:`~pathlib.Path.absolute` - * :meth:`~pathlib.Path.chmod` - * :meth:`~pathlib.Path.cwd` - * :meth:`~pathlib.Path.exists` - * :meth:`~pathlib.Path.expanduser` - * :meth:`~pathlib.Path.group` - * :meth:`~pathlib.Path.hardlink_to` - * :meth:`~pathlib.Path.home` - * :meth:`~pathlib.Path.is_block_device` - * :meth:`~pathlib.Path.is_char_device` - * :meth:`~pathlib.Path.is_dir` - * :meth:`~pathlib.Path.is_fifo` - * :meth:`~pathlib.Path.is_file` - * :meth:`~pathlib.Path.is_junction` - * :meth:`~pathlib.Path.is_mount` - * :meth:`~pathlib.Path.is_socket` - * :meth:`~pathlib.Path.is_symlink` - * :meth:`~pathlib.Path.lchmod` - * :meth:`~pathlib.Path.lstat` - * :meth:`~pathlib.Path.mkdir` - * :meth:`~pathlib.Path.open` - * :meth:`~pathlib.Path.owner` - * :meth:`~pathlib.Path.read_bytes` - * :meth:`~pathlib.Path.read_text` - * :meth:`~pathlib.Path.readlink` - * :meth:`~pathlib.Path.rename` - * :meth:`~pathlib.Path.replace` - * :meth:`~pathlib.Path.resolve` - * :meth:`~pathlib.Path.rmdir` - * :meth:`~pathlib.Path.samefile` - * :meth:`~pathlib.Path.stat` - * :meth:`~pathlib.Path.symlink_to` - * :meth:`~pathlib.Path.touch` - * :meth:`~pathlib.Path.unlink` - * :meth:`~pathlib.Path.walk` - * :meth:`~pathlib.Path.write_bytes` - * :meth:`~pathlib.Path.write_text` - - Additionally, the following methods return an async iterator yielding - :class:`~.Path` objects: - - * :meth:`~pathlib.Path.glob` - * :meth:`~pathlib.Path.iterdir` - * :meth:`~pathlib.Path.rglob` - """ - - __slots__ = "_path", "__weakref__" - - __weakref__: Any - - def __init__(self, *args: str | PathLike[str]) -> None: - self._path: Final[pathlib.Path] = pathlib.Path(*args) - - def __fspath__(self) -> str: - return self._path.__fspath__() - - def __str__(self) -> str: - return self._path.__str__() - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.as_posix()!r})" - - def __bytes__(self) -> bytes: - return self._path.__bytes__() - - def __hash__(self) -> int: - return self._path.__hash__() - - def __eq__(self, other: object) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__eq__(target) - - def __lt__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__lt__(target) - - def __le__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__le__(target) - - def __gt__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__gt__(target) - - def __ge__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__ge__(target) - - def __truediv__(self, other: str | PathLike[str]) -> Path: - return Path(self._path / other) - - def __rtruediv__(self, other: str | PathLike[str]) -> Path: - return Path(other) / self - - @property - def parts(self) -> tuple[str, ...]: - return self._path.parts - - @property - def drive(self) -> str: - return self._path.drive - - @property - def root(self) -> str: - return self._path.root - - @property - def anchor(self) -> str: - return self._path.anchor - - @property - def parents(self) -> Sequence[Path]: - return tuple(Path(p) for p in self._path.parents) - - @property - def parent(self) -> Path: - return Path(self._path.parent) - - @property - def name(self) -> str: - return self._path.name - - @property - def suffix(self) -> str: - return 
self._path.suffix - - @property - def suffixes(self) -> list[str]: - return self._path.suffixes - - @property - def stem(self) -> str: - return self._path.stem - - async def absolute(self) -> Path: - path = await to_thread.run_sync(self._path.absolute) - return Path(path) - - def as_posix(self) -> str: - return self._path.as_posix() - - def as_uri(self) -> str: - return self._path.as_uri() - - if sys.version_info >= (3, 13): - parser: ClassVar[ModuleType] = pathlib.Path.parser - - @classmethod - def from_uri(cls, uri: str) -> Path: - return Path(pathlib.Path.from_uri(uri)) - - def full_match( - self, path_pattern: str, *, case_sensitive: bool | None = None - ) -> bool: - return self._path.full_match(path_pattern, case_sensitive=case_sensitive) - - def match( - self, path_pattern: str, *, case_sensitive: bool | None = None - ) -> bool: - return self._path.match(path_pattern, case_sensitive=case_sensitive) - else: - - def match(self, path_pattern: str) -> bool: - return self._path.match(path_pattern) - - if sys.version_info >= (3, 14): - - @property - def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it - return self._path.info - - async def copy( - self, - target: str | os.PathLike[str], - *, - follow_symlinks: bool = True, - dirs_exist_ok: bool = False, - preserve_metadata: bool = False, - ) -> Path: - func = partial( - self._path.copy, - follow_symlinks=follow_symlinks, - dirs_exist_ok=dirs_exist_ok, - preserve_metadata=preserve_metadata, - ) - return Path(await to_thread.run_sync(func, target)) - - async def copy_into( - self, - target_dir: str | os.PathLike[str], - *, - follow_symlinks: bool = True, - dirs_exist_ok: bool = False, - preserve_metadata: bool = False, - ) -> Path: - func = partial( - self._path.copy_into, - follow_symlinks=follow_symlinks, - dirs_exist_ok=dirs_exist_ok, - preserve_metadata=preserve_metadata, - ) - return Path(await to_thread.run_sync(func, target_dir)) - - async def move(self, target: str | os.PathLike[str]) -> Path: - # Upstream does not handle anyio.Path properly as a PathLike - target = pathlib.Path(target) - return Path(await to_thread.run_sync(self._path.move, target)) - - async def move_into( - self, - target_dir: str | os.PathLike[str], - ) -> Path: - return Path(await to_thread.run_sync(self._path.move_into, target_dir)) - - def is_relative_to(self, other: str | PathLike[str]) -> bool: - try: - self.relative_to(other) - return True - except ValueError: - return False - - async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: - func = partial(os.chmod, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, mode) - - @classmethod - async def cwd(cls) -> Path: - path = await to_thread.run_sync(pathlib.Path.cwd) - return cls(path) - - async def exists(self) -> bool: - return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) - - async def expanduser(self) -> Path: - return Path( - await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) - ) - - def glob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.glob(pattern) - return _PathIterator(gen) - - async def group(self) -> str: - return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) - - async def hardlink_to( - self, target: str | bytes | PathLike[str] | PathLike[bytes] - ) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(os.link, target, self) - - @classmethod - async def home(cls) -> Path: - home_path = await 
to_thread.run_sync(pathlib.Path.home) - return cls(home_path) - - def is_absolute(self) -> bool: - return self._path.is_absolute() - - async def is_block_device(self) -> bool: - return await to_thread.run_sync( - self._path.is_block_device, abandon_on_cancel=True - ) - - async def is_char_device(self) -> bool: - return await to_thread.run_sync( - self._path.is_char_device, abandon_on_cancel=True - ) - - async def is_dir(self) -> bool: - return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) - - async def is_fifo(self) -> bool: - return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) - - async def is_file(self) -> bool: - return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) - - if sys.version_info >= (3, 12): - - async def is_junction(self) -> bool: - return await to_thread.run_sync(self._path.is_junction) - - async def is_mount(self) -> bool: - return await to_thread.run_sync( - os.path.ismount, self._path, abandon_on_cancel=True - ) - - def is_reserved(self) -> bool: - return self._path.is_reserved() - - async def is_socket(self) -> bool: - return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) - - async def is_symlink(self) -> bool: - return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) - - async def iterdir(self) -> AsyncIterator[Path]: - gen = ( - self._path.iterdir() - if sys.version_info < (3, 13) - else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True) - ) - async for path in _PathIterator(gen): - yield path - - def joinpath(self, *args: str | PathLike[str]) -> Path: - return Path(self._path.joinpath(*args)) - - async def lchmod(self, mode: int) -> None: - await to_thread.run_sync(self._path.lchmod, mode) - - async def lstat(self) -> os.stat_result: - return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) - - async def mkdir( - self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False - ) -> None: - await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) - - @overload - async def open( - self, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[bytes]: ... - - @overload - async def open( - self, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[str]: ... 
- - async def open( - self, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> AsyncFile[Any]: - fp = await to_thread.run_sync( - self._path.open, mode, buffering, encoding, errors, newline - ) - return AsyncFile(fp) - - async def owner(self) -> str: - return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) - - async def read_bytes(self) -> bytes: - return await to_thread.run_sync(self._path.read_bytes) - - async def read_text( - self, encoding: str | None = None, errors: str | None = None - ) -> str: - return await to_thread.run_sync(self._path.read_text, encoding, errors) - - if sys.version_info >= (3, 12): - - def relative_to( - self, *other: str | PathLike[str], walk_up: bool = False - ) -> Path: - return Path(self._path.relative_to(*other, walk_up=walk_up)) - - else: - - def relative_to(self, *other: str | PathLike[str]) -> Path: - return Path(self._path.relative_to(*other)) - - async def readlink(self) -> Path: - target = await to_thread.run_sync(os.readlink, self._path) - return Path(target) - - async def rename(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.rename, target) - return Path(target) - - async def replace(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.replace, target) - return Path(target) - - async def resolve(self, strict: bool = False) -> Path: - func = partial(self._path.resolve, strict=strict) - return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) - - def rglob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.rglob(pattern) - return _PathIterator(gen) - - async def rmdir(self) -> None: - await to_thread.run_sync(self._path.rmdir) - - async def samefile(self, other_path: str | PathLike[str]) -> bool: - if isinstance(other_path, Path): - other_path = other_path._path - - return await to_thread.run_sync( - self._path.samefile, other_path, abandon_on_cancel=True - ) - - async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: - func = partial(os.stat, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) - - async def symlink_to( - self, - target: str | bytes | PathLike[str] | PathLike[bytes], - target_is_directory: bool = False, - ) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) - - async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: - await to_thread.run_sync(self._path.touch, mode, exist_ok) - - async def unlink(self, missing_ok: bool = False) -> None: - try: - await to_thread.run_sync(self._path.unlink) - except FileNotFoundError: - if not missing_ok: - raise - - if sys.version_info >= (3, 12): - - async def walk( - self, - top_down: bool = True, - on_error: Callable[[OSError], object] | None = None, - follow_symlinks: bool = False, - ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: - def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: - try: - return next(gen) - except StopIteration: - return None - - gen = self._path.walk(top_down, on_error, follow_symlinks) - while True: - value = await to_thread.run_sync(get_next_value) - if value is None: - return - - root, dirs, paths = value - yield Path(root), dirs, paths - - 
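The wrapped methods above all funnel their blocking pathlib calls through to_thread.run_sync, so typical use is ordinary pathlib code with ``await`` added. A minimal sketch (the file and directory names are hypothetical):

    import anyio
    from anyio import Path

    async def main() -> None:
        cfg = Path("config") / "app.toml"   # pure-path operations stay synchronous
        if await cfg.exists():              # disk I/O is awaited and runs in a worker thread
            print(len(await cfg.read_text()))
        async for entry in Path(".").iterdir():  # async iterator yielding anyio.Path objects
            print(entry.name)

    anyio.run(main)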
def with_name(self, name: str) -> Path: - return Path(self._path.with_name(name)) - - def with_stem(self, stem: str) -> Path: - return Path(self._path.with_name(stem + self._path.suffix)) - - def with_suffix(self, suffix: str) -> Path: - return Path(self._path.with_suffix(suffix)) - - def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: - return Path(*pathsegments) - - async def write_bytes(self, data: bytes) -> int: - return await to_thread.run_sync(self._path.write_bytes, data) - - async def write_text( - self, - data: str, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> int: - # Path.write_text() does not support the "newline" parameter before Python 3.10 - def sync_write_text() -> int: - with self._path.open( - "w", encoding=encoding, errors=errors, newline=newline - ) as fp: - return fp.write(data) - - return await to_thread.run_sync(sync_write_text) - - -PathLike.register(Path) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_resources.py b/venv/lib/python3.12/site-packages/anyio/_core/_resources.py deleted file mode 100644 index b9a5344a..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_resources.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from ..abc import AsyncResource -from ._tasks import CancelScope - - -async def aclose_forcefully(resource: AsyncResource) -> None: - """ - Close an asynchronous resource in a cancelled scope. - - Doing this closes the resource without waiting on anything. - - :param resource: the resource to close - - """ - with CancelScope() as scope: - scope.cancel() - await resource.aclose() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_signals.py b/venv/lib/python3.12/site-packages/anyio/_core/_signals.py deleted file mode 100644 index f3451d30..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_signals.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import annotations - -from collections.abc import AsyncIterator -from contextlib import AbstractContextManager -from signal import Signals - -from ._eventloop import get_async_backend - - -def open_signal_receiver( - *signals: Signals, -) -> AbstractContextManager[AsyncIterator[Signals]]: - """ - Start receiving operating system signals. - - :param signals: signals to receive (e.g. ``signal.SIGINT``) - :return: an asynchronous context manager for an asynchronous iterator which yields - signal numbers - - .. warning:: Windows does not support signals natively so it is best to avoid - relying on this in cross-platform applications. - - .. warning:: On asyncio, this permanently replaces any previous signal handler for - the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. - - """ - return get_async_backend().open_signal_receiver(*signals) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py deleted file mode 100644 index 054bcdda..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py +++ /dev/null @@ -1,792 +0,0 @@ -from __future__ import annotations - -import errno -import os -import socket -import ssl -import stat -import sys -from collections.abc import Awaitable -from ipaddress import IPv6Address, ip_address -from os import PathLike, chmod -from socket import AddressFamily, SocketKind -from typing import TYPE_CHECKING, Any, Literal, cast, overload - -from .. 
import to_thread
-from ..abc import (
-    ConnectedUDPSocket,
-    ConnectedUNIXDatagramSocket,
-    IPAddressType,
-    IPSockAddrType,
-    SocketListener,
-    SocketStream,
-    UDPSocket,
-    UNIXDatagramSocket,
-    UNIXSocketStream,
-)
-from ..streams.stapled import MultiListener
-from ..streams.tls import TLSStream
-from ._eventloop import get_async_backend
-from ._resources import aclose_forcefully
-from ._synchronization import Event
-from ._tasks import create_task_group, move_on_after
-
-if TYPE_CHECKING:
-    from _typeshed import FileDescriptorLike
-else:
-    FileDescriptorLike = object
-
-if sys.version_info < (3, 11):
-    from exceptiongroup import ExceptionGroup
-
-if sys.version_info < (3, 13):
-    from typing_extensions import deprecated
-else:
-    from warnings import deprecated
-
-IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41)  # https://bugs.python.org/issue29515
-
-AnyIPAddressFamily = Literal[
-    AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
-]
-IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
-
-
-# tls_hostname given
-@overload
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = ...,
-    ssl_context: ssl.SSLContext | None = ...,
-    tls_standard_compatible: bool = ...,
-    tls_hostname: str,
-    happy_eyeballs_delay: float = ...,
-) -> TLSStream: ...
-
-
-# ssl_context given
-@overload
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = ...,
-    ssl_context: ssl.SSLContext,
-    tls_standard_compatible: bool = ...,
-    tls_hostname: str | None = ...,
-    happy_eyeballs_delay: float = ...,
-) -> TLSStream: ...
-
-
-# tls=True
-@overload
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = ...,
-    tls: Literal[True],
-    ssl_context: ssl.SSLContext | None = ...,
-    tls_standard_compatible: bool = ...,
-    tls_hostname: str | None = ...,
-    happy_eyeballs_delay: float = ...,
-) -> TLSStream: ...
-
-
-# tls=False
-@overload
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = ...,
-    tls: Literal[False],
-    ssl_context: ssl.SSLContext | None = ...,
-    tls_standard_compatible: bool = ...,
-    tls_hostname: str | None = ...,
-    happy_eyeballs_delay: float = ...,
-) -> SocketStream: ...
-
-
-# No TLS arguments
-@overload
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = ...,
-    happy_eyeballs_delay: float = ...,
-) -> SocketStream: ...
-
-
-async def connect_tcp(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    local_host: IPAddressType | None = None,
-    tls: bool = False,
-    ssl_context: ssl.SSLContext | None = None,
-    tls_standard_compatible: bool = True,
-    tls_hostname: str | None = None,
-    happy_eyeballs_delay: float = 0.25,
-) -> SocketStream | TLSStream:
-    """
-    Connect to a host using the TCP protocol.
-
-    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
-    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
-    each one is tried until one connection attempt succeeds. If the first attempt does
-    not connect within 250 milliseconds, a second attempt is started using the next
-    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
-    available) is tried first.
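A minimal usage sketch of this function; the host, port, and payload are placeholders, and an AnyIO event loop is assumed:

    import anyio
    from anyio import connect_tcp

    async def main() -> None:
        # plain TCP: name resolution plus staggered (Happy Eyeballs) connection attempts
        stream = await connect_tcp("example.com", 80, happy_eyeballs_delay=0.25)
        async with stream:
            await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
            print(await stream.receive())

    anyio.run(main)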
-
-    When the connection has been established, a TLS handshake will be done if either
-    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
-
-    :param remote_host: the IP address or host name to connect to
-    :param remote_port: port on the target host to connect to
-    :param local_host: the interface address or name to bind the socket to before
-        connecting
-    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
-        :class:`~anyio.streams.tls.TLSStream` instead
-    :param ssl_context: the SSL context object to use (if omitted, a default context is
-        created)
-    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
-        before closing the stream and requires that the server does this as well.
-        Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
-        Some protocols, such as HTTP, require this option to be ``False``.
-        See :meth:`~ssl.SSLContext.wrap_socket` for details.
-    :param tls_hostname: host name to check the server certificate against (defaults to
-        the value of ``remote_host``)
-    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection
-        attempt
-    :return: a socket stream object if no TLS handshake was done, otherwise a TLS
-        stream
-    :raises OSError: if the connection attempt fails
-
-    """
-    # Placed here due to https://github.com/python/mypy/issues/7057
-    connected_stream: SocketStream | None = None
-
-    async def try_connect(remote_host: str, event: Event) -> None:
-        nonlocal connected_stream
-        try:
-            stream = await asynclib.connect_tcp(
-                remote_host, remote_port, local_address
-            )
-        except OSError as exc:
-            oserrors.append(exc)
-            return
-        else:
-            if connected_stream is None:
-                connected_stream = stream
-                tg.cancel_scope.cancel()
-            else:
-                await stream.aclose()
-        finally:
-            event.set()
-
-    asynclib = get_async_backend()
-    local_address: IPSockAddrType | None = None
-    family = socket.AF_UNSPEC
-    if local_host:
-        gai_res = await getaddrinfo(str(local_host), None)
-        family, *_, local_address = gai_res[0]
-
-    target_host = str(remote_host)
-    try:
-        addr_obj = ip_address(remote_host)
-    except ValueError:
-        addr_obj = None
-
-    if addr_obj is not None:
-        if isinstance(addr_obj, IPv6Address):
-            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
-        else:
-            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
-    else:
-        # getaddrinfo() will raise an exception if name resolution fails
-        gai_res = await getaddrinfo(
-            target_host, remote_port, family=family, type=socket.SOCK_STREAM
-        )
-
-        # Organize the list so that the first address is an IPv6 address (if available)
-        # and the second one is an IPv4 address. The rest can be in whatever order.
- v6_found = v4_found = False - target_addrs = [] - for af, *rest, sa in gai_res: - if af == socket.AF_INET6 and not v6_found: - v6_found = True - target_addrs.insert(0, (af, sa[0])) - elif af == socket.AF_INET and not v4_found and v6_found: - v4_found = True - target_addrs.insert(1, (af, sa[0])) - else: - target_addrs.append((af, sa[0])) - - oserrors: list[OSError] = [] - try: - async with create_task_group() as tg: - for i, (af, addr) in enumerate(target_addrs): - event = Event() - tg.start_soon(try_connect, addr, event) - with move_on_after(happy_eyeballs_delay): - await event.wait() - - if connected_stream is None: - cause = ( - oserrors[0] - if len(oserrors) == 1 - else ExceptionGroup("multiple connection attempts failed", oserrors) - ) - raise OSError("All connection attempts failed") from cause - finally: - oserrors.clear() - - if tls or tls_hostname or ssl_context: - try: - return await TLSStream.wrap( - connected_stream, - server_side=False, - hostname=tls_hostname or str(remote_host), - ssl_context=ssl_context, - standard_compatible=tls_standard_compatible, - ) - except BaseException: - await aclose_forcefully(connected_stream) - raise - - return connected_stream - - -async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: - """ - Connect to the given UNIX socket. - - Not available on Windows. - - :param path: path to the socket - :return: a socket stream object - - """ - path = os.fspath(path) - return await get_async_backend().connect_unix(path) - - -async def create_tcp_listener( - *, - local_host: IPAddressType | None = None, - local_port: int = 0, - family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, - backlog: int = 65536, - reuse_port: bool = False, -) -> MultiListener[SocketStream]: - """ - Create a TCP socket listener. - - :param local_port: port number to listen on - :param local_host: IP address of the interface to listen on. If omitted, listen on - all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address - family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. - :param family: address family (used if ``local_host`` was omitted) - :param backlog: maximum number of queued incoming connections (up to a maximum of - 2**16, or 65536) - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a list of listener objects - - """ - asynclib = get_async_backend() - backlog = min(backlog, 65536) - local_host = str(local_host) if local_host is not None else None - gai_res = await getaddrinfo( - local_host, - local_port, - family=family, - type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - listeners: list[SocketListener] = [] - try: - # The set() is here to work around a glibc bug: - # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 - sockaddr: tuple[str, int] | tuple[str, int, int, int] - for fam, kind, *_, sockaddr in sorted(set(gai_res)): - # Workaround for an uvloop bug where we don't get the correct scope ID for - # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to - # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 - if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: - continue - - raw_socket = socket.socket(fam) - raw_socket.setblocking(False) - - # For Windows, enable exclusive address use. For others, enable address - # reuse. 
-            if sys.platform == "win32":
-                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
-            else:
-                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-
-            if reuse_port:
-                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
-
-            # If only IPv6 was requested, disable dual stack operation
-            if fam == socket.AF_INET6:
-                raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
-
-            # Workaround for #554
-            if "%" in sockaddr[0]:
-                addr, scope_id = sockaddr[0].split("%", 1)
-                sockaddr = (addr, sockaddr[1], 0, int(scope_id))
-
-            raw_socket.bind(sockaddr)
-            raw_socket.listen(backlog)
-            listener = asynclib.create_tcp_listener(raw_socket)
-            listeners.append(listener)
-    except BaseException:
-        for listener in listeners:
-            await listener.aclose()
-
-        raise
-
-    return MultiListener(listeners)
-
-
-async def create_unix_listener(
-    path: str | bytes | PathLike[Any],
-    *,
-    mode: int | None = None,
-    backlog: int = 65536,
-) -> SocketListener:
-    """
-    Create a UNIX socket listener.
-
-    Not available on Windows.
-
-    :param path: path of the socket
-    :param mode: permissions to set on the socket
-    :param backlog: maximum number of queued incoming connections (up to a maximum of
-        2**16, or 65536)
-    :return: a listener object
-
-    .. versionchanged:: 3.0
-        If a socket already exists on the file system in the given path, it will be
-        removed first.
-
-    """
-    backlog = min(backlog, 65536)
-    raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
-    try:
-        raw_socket.listen(backlog)
-        return get_async_backend().create_unix_listener(raw_socket)
-    except BaseException:
-        raw_socket.close()
-        raise
-
-
-async def create_udp_socket(
-    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
-    *,
-    local_host: IPAddressType | None = None,
-    local_port: int = 0,
-    reuse_port: bool = False,
-) -> UDPSocket:
-    """
-    Create a UDP socket.
-
-    If ``local_port`` has been given, the socket will be bound to this port on the
-    local machine, making this socket suitable for providing UDP based services.
-
-    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
-        determined from ``local_host`` if omitted
-    :param local_host: IP address or host name of the local interface to bind to
-    :param local_port: local port to bind to
-    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
-        address/port (not supported on Windows)
-    :return: a UDP socket
-
-    """
-    if family is AddressFamily.AF_UNSPEC and not local_host:
-        raise ValueError('Either "family" or "local_host" must be given')
-
-    if local_host:
-        gai_res = await getaddrinfo(
-            str(local_host),
-            local_port,
-            family=family,
-            type=socket.SOCK_DGRAM,
-            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
-        )
-        family = cast(AnyIPAddressFamily, gai_res[0][0])
-        local_address = gai_res[0][-1]
-    elif family is AddressFamily.AF_INET6:
-        local_address = ("::", 0)
-    else:
-        local_address = ("0.0.0.0", 0)
-
-    sock = await get_async_backend().create_udp_socket(
-        family, local_address, None, reuse_port
-    )
-    return cast(UDPSocket, sock)
-
-
-async def create_connected_udp_socket(
-    remote_host: IPAddressType,
-    remote_port: int,
-    *,
-    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
-    local_host: IPAddressType | None = None,
-    local_port: int = 0,
-    reuse_port: bool = False,
-) -> ConnectedUDPSocket:
-    """
-    Create a connected UDP socket.
-
-    Connected UDP sockets can only communicate with the specified remote host/port, and
-    any packets sent from other sources are dropped.
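A minimal usage sketch, assuming a peer is listening on the placeholder address 127.0.0.1:9000:

    import anyio
    from anyio import create_connected_udp_socket

    async def main() -> None:
        # the remote address is fixed at creation, so plain send()/receive() suffice
        async with await create_connected_udp_socket("127.0.0.1", 9000) as udp:
            await udp.send(b"ping")
            print(await udp.receive())

    anyio.run(main)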
- - :param remote_host: remote host to set as the default target - :param remote_port: port on the remote host to set as the default target - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically - determined from ``local_host`` or ``remote_host`` if omitted - :param local_host: IP address or host name of the local interface to bind to - :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a connected UDP socket - - """ - local_address = None - if local_host: - gai_res = await getaddrinfo( - str(local_host), - local_port, - family=family, - type=socket.SOCK_DGRAM, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - local_address = gai_res[0][-1] - - gai_res = await getaddrinfo( - str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - remote_address = gai_res[0][-1] - - sock = await get_async_backend().create_udp_socket( - family, local_address, remote_address, reuse_port - ) - return cast(ConnectedUDPSocket, sock) - - -async def create_unix_datagram_socket( - *, - local_path: None | str | bytes | PathLike[Any] = None, - local_mode: int | None = None, -) -> UNIXDatagramSocket: - """ - Create a UNIX datagram socket. - - Not available on Windows. - - If ``local_path`` has been given, the socket will be bound to this path, making this - socket suitable for receiving datagrams from other processes. Other processes can - send datagrams to this socket only if ``local_path`` is set. - - If a socket already exists on the file system in the ``local_path``, it will be - removed first. - - :param local_path: the path on which to bind to - :param local_mode: permissions to set on the local socket - :return: a UNIX datagram socket - - """ - raw_socket = await setup_unix_local_socket( - local_path, local_mode, socket.SOCK_DGRAM - ) - return await get_async_backend().create_unix_datagram_socket(raw_socket, None) - - -async def create_connected_unix_datagram_socket( - remote_path: str | bytes | PathLike[Any], - *, - local_path: None | str | bytes | PathLike[Any] = None, - local_mode: int | None = None, -) -> ConnectedUNIXDatagramSocket: - """ - Create a connected UNIX datagram socket. - - Connected datagram sockets can only communicate with the specified remote path. - - If ``local_path`` has been given, the socket will be bound to this path, making - this socket suitable for receiving datagrams from other processes. Other processes - can send datagrams to this socket only if ``local_path`` is set. - - If a socket already exists on the file system in the ``local_path``, it will be - removed first. 
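A minimal usage sketch with hypothetical socket paths; the remote peer must already be bound for the send to succeed:

    import anyio
    from anyio import create_connected_unix_datagram_socket

    async def main() -> None:
        async with await create_connected_unix_datagram_socket(
            "/tmp/peer.sock", local_path="/tmp/me.sock"  # hypothetical paths
        ) as sock:
            await sock.send(b"hello")

    anyio.run(main)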
-
-    :param remote_path: the path to set as the default target
-    :param local_path: the path to bind to
-    :param local_mode: permissions to set on the local socket
-    :return: a connected UNIX datagram socket
-
-    """
-    remote_path = os.fspath(remote_path)
-    raw_socket = await setup_unix_local_socket(
-        local_path, local_mode, socket.SOCK_DGRAM
-    )
-    return await get_async_backend().create_unix_datagram_socket(
-        raw_socket, remote_path
-    )
-
-
-async def getaddrinfo(
-    host: bytes | str | None,
-    port: str | int | None,
-    *,
-    family: int | AddressFamily = 0,
-    type: int | SocketKind = 0,
-    proto: int = 0,
-    flags: int = 0,
-) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
-    """
-    Look up a numeric IP address given a host name.
-
-    Internationalized domain names are translated according to the (non-transitional)
-    IDNA 2008 standard.
-
-    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
-        (host, port), unlike what :func:`socket.getaddrinfo` does.
-
-    :param host: host name
-    :param port: port number
-    :param family: socket family (``AF_INET``, ...)
-    :param type: socket type (``SOCK_STREAM``, ...)
-    :param proto: protocol number
-    :param flags: flags to pass to upstream ``getaddrinfo()``
-    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
-
-    .. seealso:: :func:`socket.getaddrinfo`
-
-    """
-    # Handle unicode hostnames
-    if isinstance(host, str):
-        try:
-            encoded_host: bytes | None = host.encode("ascii")
-        except UnicodeEncodeError:
-            import idna
-
-            encoded_host = idna.encode(host, uts46=True)
-    else:
-        encoded_host = host
-
-    gai_res = await get_async_backend().getaddrinfo(
-        encoded_host, port, family=family, type=type, proto=proto, flags=flags
-    )
-    return [
-        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
-        for family, type, proto, canonname, sockaddr in gai_res
-        # filter out IPv6 results when IPv6 is disabled
-        if not isinstance(sockaddr[0], int)
-    ]
-
-
-def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
-    """
-    Look up the host name of an IP address.
-
-    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
-    :param flags: flags to pass to upstream ``getnameinfo()``
-    :return: a tuple of (host name, service name)
-
-    .. seealso:: :func:`socket.getnameinfo`
-
-    """
-    return get_async_backend().getnameinfo(sockaddr, flags)
-
-
-@deprecated("This function is deprecated; use `wait_readable` instead")
-def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
-    """
-    .. deprecated:: 4.7.0
-        Use :func:`wait_readable` instead.
-
-    Wait until the given socket has data to be read.
-
-    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
-        level constructs like socket streams!
-
-    :param sock: a socket object
-    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
-        socket to become readable
-    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
-        to become readable
-
-    """
-    return get_async_backend().wait_readable(sock.fileno())
-
-
-@deprecated("This function is deprecated; use `wait_writable` instead")
-def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
-    """
-    .. deprecated:: 4.7.0
-        Use :func:`wait_writable` instead.
-
-    Wait until the given socket can be written to.
-
-    This does **NOT** work on Windows when using the asyncio backend with a proactor
-    event loop (default on py3.8+).
-
-    ..
warning:: Only use this on raw sockets that have not been wrapped by any higher - level constructs like socket streams! - - :param sock: a socket object - :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the - socket to become writable - :raises ~anyio.BusyResourceError: if another task is already waiting for the socket - to become writable - - """ - return get_async_backend().wait_writable(sock.fileno()) - - -def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: - """ - Wait until the given object has data to be read. - - On Unix systems, ``obj`` must either be an integer file descriptor, or else an - object with a ``.fileno()`` method which returns an integer file descriptor. Any - kind of file descriptor can be passed, though the exact semantics will depend on - your kernel. For example, this probably won't do anything useful for on-disk files. - - On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an - object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File - descriptors aren't supported, and neither are handles that refer to anything besides - a ``SOCKET``. - - On backends where this functionality is not natively provided (asyncio - ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread - which is set to shut down when the interpreter shuts down. - - .. warning:: Don't use this on raw sockets that have been wrapped by any higher - level constructs like socket streams! - - :param obj: an object with a ``.fileno()`` method or an integer handle - :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the - object to become readable - :raises ~anyio.BusyResourceError: if another task is already waiting for the object - to become readable - - """ - return get_async_backend().wait_readable(obj) - - -def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: - """ - Wait until the given object can be written to. - - :param obj: an object with a ``.fileno()`` method or an integer handle - :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the - object to become writable - :raises ~anyio.BusyResourceError: if another task is already waiting for the object - to become writable - - .. seealso:: See the documentation of :func:`wait_readable` for the definition of - ``obj`` and notes on backend compatibility. - - .. warning:: Don't use this on raw sockets that have been wrapped by any higher - level constructs like socket streams! - - """ - return get_async_backend().wait_writable(obj) - - -# -# Private API -# - - -def convert_ipv6_sockaddr( - sockaddr: tuple[str, int, int, int] | tuple[str, int], -) -> tuple[str, int]: - """ - Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. - - If the scope ID is nonzero, it is added to the address, separated with ``%``. - Otherwise the flow id and scope id are simply cut off from the tuple. - Any other kinds of socket addresses are returned as-is. 
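For concreteness, the conversion described above behaves like this (addresses invented for illustration):

    # nonzero scope ID: appended to the host with '%'
    assert convert_ipv6_sockaddr(("fe80::1", 80, 0, 3)) == ("fe80::1%3", 80)
    # zero scope ID: flow info and scope ID are dropped
    assert convert_ipv6_sockaddr(("2001:db8::1", 443, 0, 0)) == ("2001:db8::1", 443)
    # 2-tuples (e.g. IPv4) pass through unchanged
    assert convert_ipv6_sockaddr(("127.0.0.1", 8080)) == ("127.0.0.1", 8080)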
- - :param sockaddr: the result of :meth:`~socket.socket.getsockname` - :return: the converted socket address - - """ - # This is more complicated than it should be because of MyPy - if isinstance(sockaddr, tuple) and len(sockaddr) == 4: - host, port, flowinfo, scope_id = sockaddr - if scope_id: - # PyPy (as of v7.3.11) leaves the interface name in the result, so - # we discard it and only get the scope ID from the end - # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) - host = host.split("%")[0] - - # Add scope_id to the address - return f"{host}%{scope_id}", port - else: - return host, port - else: - return sockaddr - - -async def setup_unix_local_socket( - path: None | str | bytes | PathLike[Any], - mode: int | None, - socktype: int, -) -> socket.socket: - """ - Create a UNIX local socket object, deleting the socket at the given path if it - exists. - - Not available on Windows. - - :param path: path of the socket - :param mode: permissions to set on the socket - :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM - - """ - path_str: str | None - if path is not None: - path_str = os.fsdecode(path) - - # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call - if not path_str.startswith("\0"): - # Copied from pathlib... - try: - stat_result = os.stat(path) - except OSError as e: - if e.errno not in ( - errno.ENOENT, - errno.ENOTDIR, - errno.EBADF, - errno.ELOOP, - ): - raise - else: - if stat.S_ISSOCK(stat_result.st_mode): - os.unlink(path) - else: - path_str = None - - raw_socket = socket.socket(socket.AF_UNIX, socktype) - raw_socket.setblocking(False) - - if path_str is not None: - try: - await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) - if mode is not None: - await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) - except BaseException: - raw_socket.close() - raise - - return raw_socket diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_streams.py b/venv/lib/python3.12/site-packages/anyio/_core/_streams.py deleted file mode 100644 index 6a9814e5..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_streams.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -import math -from typing import TypeVar -from warnings import warn - -from ..streams.memory import ( - MemoryObjectReceiveStream, - MemoryObjectSendStream, - MemoryObjectStreamState, -) - -T_Item = TypeVar("T_Item") - - -class create_memory_object_stream( - tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], -): - """ - Create a memory object stream. - - The stream's item type can be annotated like - :func:`create_memory_object_stream[T_Item]`. - - :param max_buffer_size: number of items held in the buffer until ``send()`` starts - blocking - :param item_type: old way of marking the streams with the right generic type for - static typing (does nothing on AnyIO 4) - - .. deprecated:: 4.0 - Use ``create_memory_object_stream[YourItemType](...)`` instead. 
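A minimal usage sketch of the non-deprecated form (item type and buffer size chosen arbitrarily):

    import anyio
    from anyio import create_memory_object_stream

    async def main() -> None:
        send, receive = create_memory_object_stream[int](max_buffer_size=8)
        async with send, receive:
            await send.send(42)
            print(await receive.receive())

    anyio.run(main)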
- :return: a tuple of (send stream, receive stream) - - """ - - def __new__( # type: ignore[misc] - cls, max_buffer_size: float = 0, item_type: object = None - ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: - if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): - raise ValueError("max_buffer_size must be either an integer or math.inf") - if max_buffer_size < 0: - raise ValueError("max_buffer_size cannot be negative") - if item_type is not None: - warn( - "The item_type argument has been deprecated in AnyIO 4.0. " - "Use create_memory_object_stream[YourItemType](...) instead.", - DeprecationWarning, - stacklevel=2, - ) - - state = MemoryObjectStreamState[T_Item](max_buffer_size) - return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py deleted file mode 100644 index 36d9b306..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py +++ /dev/null @@ -1,202 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import AsyncIterable, Iterable, Mapping, Sequence -from io import BytesIO -from os import PathLike -from subprocess import PIPE, CalledProcessError, CompletedProcess -from typing import IO, Any, Union, cast - -from ..abc import Process -from ._eventloop import get_async_backend -from ._tasks import create_task_group - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] - - -async def run_process( - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - input: bytes | None = None, - stdin: int | IO[Any] | None = None, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = PIPE, - check: bool = True, - cwd: StrOrBytesPath | None = None, - env: Mapping[str, str] | None = None, - startupinfo: Any = None, - creationflags: int = 0, - start_new_session: bool = False, - pass_fds: Sequence[int] = (), - user: str | int | None = None, - group: str | int | None = None, - extra_groups: Iterable[str | int] | None = None, - umask: int = -1, -) -> CompletedProcess[bytes]: - """ - Run an external command in a subprocess and wait until it completes. - - .. 
seealso:: :func:`subprocess.run` - - :param command: either a string to pass to the shell, or an iterable of strings - containing the executable name or path and its arguments - :param input: bytes passed to the standard input of the subprocess - :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - a file-like object, or `None`; ``input`` overrides this - :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - a file-like object, or `None` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - :data:`subprocess.STDOUT`, a file-like object, or `None` - :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the - process terminates with a return code other than 0 - :param cwd: If not ``None``, change the working directory to this before running the - command - :param env: if not ``None``, this mapping replaces the inherited environment - variables from the parent process - :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used - to specify process startup parameters (Windows only) - :param creationflags: flags that can be used to control the creation of the - subprocess (see :class:`subprocess.Popen` for the specifics) - :param start_new_session: if ``true`` the setsid() system call will be made in the - child process prior to the execution of the subprocess. (POSIX only) - :param pass_fds: sequence of file descriptors to keep open between the parent and - child processes. (POSIX only) - :param user: effective user to run the process as (Python >= 3.9, POSIX only) - :param group: effective group to run the process as (Python >= 3.9, POSIX only) - :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, - POSIX only) - :param umask: if not negative, this umask is applied in the child process before - running the given command (Python >= 3.9, POSIX only) - :return: an object representing the completed process - :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process - exits with a nonzero return code - - """ - - async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: - buffer = BytesIO() - async for chunk in stream: - buffer.write(chunk) - - stream_contents[index] = buffer.getvalue() - - if stdin is not None and input is not None: - raise ValueError("only one of stdin and input is allowed") - - async with await open_process( - command, - stdin=PIPE if input else stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - startupinfo=startupinfo, - creationflags=creationflags, - start_new_session=start_new_session, - pass_fds=pass_fds, - user=user, - group=group, - extra_groups=extra_groups, - umask=umask, - ) as process: - stream_contents: list[bytes | None] = [None, None] - async with create_task_group() as tg: - if process.stdout: - tg.start_soon(drain_stream, process.stdout, 0) - - if process.stderr: - tg.start_soon(drain_stream, process.stderr, 1) - - if process.stdin and input: - await process.stdin.send(input) - await process.stdin.aclose() - - await process.wait() - - output, errors = stream_contents - if check and process.returncode != 0: - raise CalledProcessError(cast(int, process.returncode), command, output, errors) - - return CompletedProcess(command, cast(int, process.returncode), output, errors) - - -async def open_process( - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None = PIPE, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = 
PIPE,
-    cwd: StrOrBytesPath | None = None,
-    env: Mapping[str, str] | None = None,
-    startupinfo: Any = None,
-    creationflags: int = 0,
-    start_new_session: bool = False,
-    pass_fds: Sequence[int] = (),
-    user: str | int | None = None,
-    group: str | int | None = None,
-    extra_groups: Iterable[str | int] | None = None,
-    umask: int = -1,
-) -> Process:
-    """
-    Start an external command in a subprocess.
-
-    .. seealso:: :class:`subprocess.Popen`
-
-    :param command: either a string to pass to the shell, or an iterable of strings
-        containing the executable name or path and its arguments
-    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
-        file-like object, or ``None``
-    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
-        a file-like object, or ``None``
-    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
-        :data:`subprocess.STDOUT`, a file-like object, or ``None``
-    :param cwd: If not ``None``, the working directory is changed before executing
-    :param env: If env is not ``None``, it must be a mapping that defines the
-        environment variables for the new process
-    :param creationflags: flags that can be used to control the creation of the
-        subprocess (see :class:`subprocess.Popen` for the specifics)
-    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
-        to specify process startup parameters (Windows only)
-    :param start_new_session: if ``true`` the setsid() system call will be made in the
-        child process prior to the execution of the subprocess. (POSIX only)
-    :param pass_fds: sequence of file descriptors to keep open between the parent and
-        child processes. (POSIX only)
-    :param user: effective user to run the process as (POSIX only)
-    :param group: effective group to run the process as (POSIX only)
-    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
-    :param umask: if not negative, this umask is applied in the child process before
-        running the given command (POSIX only)
-    :return: an asynchronous process object
-
-    """
-    kwargs: dict[str, Any] = {}
-    if user is not None:
-        kwargs["user"] = user
-
-    if group is not None:
-        kwargs["group"] = group
-
-    if extra_groups is not None:
-        kwargs["extra_groups"] = extra_groups
-
-    if umask >= 0:
-        kwargs["umask"] = umask
-
-    return await get_async_backend().open_process(
-        command,
-        stdin=stdin,
-        stdout=stdout,
-        stderr=stderr,
-        cwd=cwd,
-        env=env,
-        startupinfo=startupinfo,
-        creationflags=creationflags,
-        start_new_session=start_new_session,
-        pass_fds=pass_fds,
-        **kwargs,
-    )
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
deleted file mode 100644
index a6331328..00000000
--- a/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
+++ /dev/null
@@ -1,732 +0,0 @@
-from __future__ import annotations
-
-import math
-from collections import deque
-from dataclasses import dataclass
-from types import TracebackType
-
-from sniffio import AsyncLibraryNotFoundError
-
-from ..lowlevel import checkpoint
-from ._eventloop import get_async_backend
-from ._exceptions import BusyResourceError
-from ._tasks import CancelScope
-from ._testing import TaskInfo, get_current_task
-
-
-@dataclass(frozen=True)
-class EventStatistics:
-    """
-    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
-    """
-
-    tasks_waiting: int
-
-
-@dataclass(frozen=True)
-class CapacityLimiterStatistics:
-    """
-    :ivar int
borrowed_tokens: number of tokens currently borrowed by tasks - :ivar float total_tokens: total number of available tokens - :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from - this limiter - :ivar int tasks_waiting: number of tasks waiting on - :meth:`~.CapacityLimiter.acquire` or - :meth:`~.CapacityLimiter.acquire_on_behalf_of` - """ - - borrowed_tokens: int - total_tokens: float - borrowers: tuple[object, ...] - tasks_waiting: int - - -@dataclass(frozen=True) -class LockStatistics: - """ - :ivar bool locked: flag indicating if this lock is locked or not - :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the - lock is not held by any task) - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` - """ - - locked: bool - owner: TaskInfo | None - tasks_waiting: int - - -@dataclass(frozen=True) -class ConditionStatistics: - """ - :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` - :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying - :class:`~.Lock` - """ - - tasks_waiting: int - lock_statistics: LockStatistics - - -@dataclass(frozen=True) -class SemaphoreStatistics: - """ - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` - - """ - - tasks_waiting: int - - -class Event: - def __new__(cls) -> Event: - try: - return get_async_backend().create_event() - except AsyncLibraryNotFoundError: - return EventAdapter() - - def set(self) -> None: - """Set the flag, notifying all listeners.""" - raise NotImplementedError - - def is_set(self) -> bool: - """Return ``True`` if the flag is set, ``False`` if not.""" - raise NotImplementedError - - async def wait(self) -> None: - """ - Wait until the flag has been set. - - If the flag has already been set when this method is called, it returns - immediately. - - """ - raise NotImplementedError - - def statistics(self) -> EventStatistics: - """Return statistics about the current state of this event.""" - raise NotImplementedError - - -class EventAdapter(Event): - _internal_event: Event | None = None - _is_set: bool = False - - def __new__(cls) -> EventAdapter: - return object.__new__(cls) - - @property - def _event(self) -> Event: - if self._internal_event is None: - self._internal_event = get_async_backend().create_event() - if self._is_set: - self._internal_event.set() - - return self._internal_event - - def set(self) -> None: - if self._internal_event is None: - self._is_set = True - else: - self._event.set() - - def is_set(self) -> bool: - if self._internal_event is None: - return self._is_set - - return self._internal_event.is_set() - - async def wait(self) -> None: - await self._event.wait() - - def statistics(self) -> EventStatistics: - if self._internal_event is None: - return EventStatistics(tasks_waiting=0) - - return self._internal_event.statistics() - - -class Lock: - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - try: - return get_async_backend().create_lock(fast_acquire=fast_acquire) - except AsyncLibraryNotFoundError: - return LockAdapter(fast_acquire=fast_acquire) - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Acquire the lock.""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire the lock, without blocking. 
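A minimal sketch of the Event and Lock primitives defined above, combined in one task group (the task layout is arbitrary):

    import anyio
    from anyio import Event, Lock

    async def main() -> None:
        lock = Lock()
        event = Event()

        async def setter() -> None:
            async with lock:   # awaits acquire(), releases on exit
                event.set()    # wakes every task blocked in event.wait()

        async with anyio.create_task_group() as tg:
            tg.start_soon(setter)
            await event.wait()

    anyio.run(main)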
- - :raises ~anyio.WouldBlock: if the operation would block - - """ - raise NotImplementedError - - def release(self) -> None: - """Release the lock.""" - raise NotImplementedError - - def locked(self) -> bool: - """Return True if the lock is currently held.""" - raise NotImplementedError - - def statistics(self) -> LockStatistics: - """ - Return statistics about the current state of this lock. - - .. versionadded:: 3.0 - """ - raise NotImplementedError - - -class LockAdapter(Lock): - _internal_lock: Lock | None = None - - def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False): - self._fast_acquire = fast_acquire - - @property - def _lock(self) -> Lock: - if self._internal_lock is None: - self._internal_lock = get_async_backend().create_lock( - fast_acquire=self._fast_acquire - ) - - return self._internal_lock - - async def __aenter__(self) -> None: - await self._lock.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - if self._internal_lock is not None: - self._internal_lock.release() - - async def acquire(self) -> None: - """Acquire the lock.""" - await self._lock.acquire() - - def acquire_nowait(self) -> None: - """ - Acquire the lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - self._lock.acquire_nowait() - - def release(self) -> None: - """Release the lock.""" - self._lock.release() - - def locked(self) -> bool: - """Return True if the lock is currently held.""" - return self._lock.locked() - - def statistics(self) -> LockStatistics: - """ - Return statistics about the current state of this lock. - - .. versionadded:: 3.0 - - """ - if self._internal_lock is None: - return LockStatistics(False, None, 0) - - return self._internal_lock.statistics() - - -class Condition: - _owner_task: TaskInfo | None = None - - def __init__(self, lock: Lock | None = None): - self._lock = lock or Lock() - self._waiters: deque[Event] = deque() - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - def _check_acquired(self) -> None: - if self._owner_task != get_current_task(): - raise RuntimeError("The current task is not holding the underlying lock") - - async def acquire(self) -> None: - """Acquire the underlying lock.""" - await self._lock.acquire() - self._owner_task = get_current_task() - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. 
- - :raises ~anyio.WouldBlock: if the operation would block - - """ - self._lock.acquire_nowait() - self._owner_task = get_current_task() - - def release(self) -> None: - """Release the underlying lock.""" - self._lock.release() - - def locked(self) -> bool: - """Return True if the lock is set.""" - return self._lock.locked() - - def notify(self, n: int = 1) -> None: - """Notify exactly n listeners.""" - self._check_acquired() - for _ in range(n): - try: - event = self._waiters.popleft() - except IndexError: - break - - event.set() - - def notify_all(self) -> None: - """Notify all the listeners.""" - self._check_acquired() - for event in self._waiters: - event.set() - - self._waiters.clear() - - async def wait(self) -> None: - """Wait for a notification.""" - await checkpoint() - event = Event() - self._waiters.append(event) - self.release() - try: - await event.wait() - except BaseException: - if not event.is_set(): - self._waiters.remove(event) - - raise - finally: - with CancelScope(shield=True): - await self.acquire() - - def statistics(self) -> ConditionStatistics: - """ - Return statistics about the current state of this condition. - - .. versionadded:: 3.0 - """ - return ConditionStatistics(len(self._waiters), self._lock.statistics()) - - -class Semaphore: - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - try: - return get_async_backend().create_semaphore( - initial_value, max_value=max_value, fast_acquire=fast_acquire - ) - except AsyncLibraryNotFoundError: - return SemaphoreAdapter(initial_value, max_value=max_value) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ): - if not isinstance(initial_value, int): - raise TypeError("initial_value must be an integer") - if initial_value < 0: - raise ValueError("initial_value must be >= 0") - if max_value is not None: - if not isinstance(max_value, int): - raise TypeError("max_value must be an integer or None") - if max_value < initial_value: - raise ValueError( - "max_value must be equal to or higher than initial_value" - ) - - self._fast_acquire = fast_acquire - - async def __aenter__(self) -> Semaphore: - await self.acquire() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Decrement the semaphore value, blocking if necessary.""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - raise NotImplementedError - - def release(self) -> None: - """Increment the semaphore value.""" - raise NotImplementedError - - @property - def value(self) -> int: - """The current value of the semaphore.""" - raise NotImplementedError - - @property - def max_value(self) -> int | None: - """The maximum value of the semaphore.""" - raise NotImplementedError - - def statistics(self) -> SemaphoreStatistics: - """ - Return statistics about the current state of this semaphore. - - .. 
versionadded:: 3.0 - """ - raise NotImplementedError - - -class SemaphoreAdapter(Semaphore): - _internal_semaphore: Semaphore | None = None - - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> SemaphoreAdapter: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> None: - super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) - self._initial_value = initial_value - self._max_value = max_value - - @property - def _semaphore(self) -> Semaphore: - if self._internal_semaphore is None: - self._internal_semaphore = get_async_backend().create_semaphore( - self._initial_value, max_value=self._max_value - ) - - return self._internal_semaphore - - async def acquire(self) -> None: - await self._semaphore.acquire() - - def acquire_nowait(self) -> None: - self._semaphore.acquire_nowait() - - def release(self) -> None: - self._semaphore.release() - - @property - def value(self) -> int: - if self._internal_semaphore is None: - return self._initial_value - - return self._semaphore.value - - @property - def max_value(self) -> int | None: - return self._max_value - - def statistics(self) -> SemaphoreStatistics: - if self._internal_semaphore is None: - return SemaphoreStatistics(tasks_waiting=0) - - return self._semaphore.statistics() - - -class CapacityLimiter: - def __new__(cls, total_tokens: float) -> CapacityLimiter: - try: - return get_async_backend().create_capacity_limiter(total_tokens) - except AsyncLibraryNotFoundError: - return CapacityLimiterAdapter(total_tokens) - - async def __aenter__(self) -> None: - raise NotImplementedError - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - raise NotImplementedError - - @property - def total_tokens(self) -> float: - """ - The total number of tokens available for borrowing. - - This is a read-write property. If the total number of tokens is increased, the - proportionate number of tasks waiting on this limiter will be granted their - tokens. - - .. versionchanged:: 3.0 - The property is now writable. - - """ - raise NotImplementedError - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - raise NotImplementedError - - @property - def borrowed_tokens(self) -> int: - """The number of tokens that have currently been borrowed.""" - raise NotImplementedError - - @property - def available_tokens(self) -> float: - """The number of tokens currently available to be borrowed""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire a token for the current task without waiting for one to become - available. - - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - """ - Acquire a token without waiting for one to become available. - - :param borrower: the entity borrowing a token - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - async def acquire(self) -> None: - """ - Acquire a token for the current task, waiting if necessary for one to become - available. - - """ - raise NotImplementedError - - async def acquire_on_behalf_of(self, borrower: object) -> None: - """ - Acquire a token, waiting if necessary for one to become available. 
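A minimal sketch of how the Semaphore removed here is typically used to bound concurrency (assumes anyio is installed; `worker` and the counts are illustrative):

    import anyio

    async def worker(sem: anyio.Semaphore, n: int) -> None:
        async with sem:                 # at most two workers inside at once
            await anyio.sleep(0.1)
            print(f"worker {n} done")

    async def main() -> None:
        sem = anyio.Semaphore(2, max_value=2)
        async with anyio.create_task_group() as tg:
            for n in range(5):
                tg.start_soon(worker, sem, n)

    anyio.run(main)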
- - :param borrower: the entity borrowing a token - - """ - raise NotImplementedError - - def release(self) -> None: - """ - Release the token held by the current task. - - :raises RuntimeError: if the current task has not borrowed a token from this - limiter. - - """ - raise NotImplementedError - - def release_on_behalf_of(self, borrower: object) -> None: - """ - Release the token held by the given borrower. - - :raises RuntimeError: if the borrower has not borrowed a token from this - limiter. - - """ - raise NotImplementedError - - def statistics(self) -> CapacityLimiterStatistics: - """ - Return statistics about the current state of this limiter. - - .. versionadded:: 3.0 - - """ - raise NotImplementedError - - -class CapacityLimiterAdapter(CapacityLimiter): - _internal_limiter: CapacityLimiter | None = None - - def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: - return object.__new__(cls) - - def __init__(self, total_tokens: float) -> None: - self.total_tokens = total_tokens - - @property - def _limiter(self) -> CapacityLimiter: - if self._internal_limiter is None: - self._internal_limiter = get_async_backend().create_capacity_limiter( - self._total_tokens - ) - - return self._internal_limiter - - async def __aenter__(self) -> None: - await self._limiter.__aenter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) - - @property - def total_tokens(self) -> float: - if self._internal_limiter is None: - return self._total_tokens - - return self._internal_limiter.total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - if not isinstance(value, int) and value is not math.inf: - raise TypeError("total_tokens must be an int or math.inf") - elif value < 1: - raise ValueError("total_tokens must be >= 1") - - if self._internal_limiter is None: - self._total_tokens = value - return - - self._limiter.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - if self._internal_limiter is None: - return 0 - - return self._internal_limiter.borrowed_tokens - - @property - def available_tokens(self) -> float: - if self._internal_limiter is None: - return self._total_tokens - - return self._internal_limiter.available_tokens - - def acquire_nowait(self) -> None: - self._limiter.acquire_nowait() - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - self._limiter.acquire_on_behalf_of_nowait(borrower) - - async def acquire(self) -> None: - await self._limiter.acquire() - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await self._limiter.acquire_on_behalf_of(borrower) - - def release(self) -> None: - self._limiter.release() - - def release_on_behalf_of(self, borrower: object) -> None: - self._limiter.release_on_behalf_of(borrower) - - def statistics(self) -> CapacityLimiterStatistics: - if self._internal_limiter is None: - return CapacityLimiterStatistics( - borrowed_tokens=0, - total_tokens=self.total_tokens, - borrowers=(), - tasks_waiting=0, - ) - - return self._internal_limiter.statistics() - - -class ResourceGuard: - """ - A context manager for ensuring that a resource is only used by a single task at a - time. - - Entering this context manager while the previous has not exited it yet will trigger - :exc:`BusyResourceError`. - - :param action: the action to guard against (visible in the :exc:`BusyResourceError` - when triggered, e.g. 
"Another task is already {action} this resource") - - .. versionadded:: 4.1 - """ - - __slots__ = "action", "_guarded" - - def __init__(self, action: str = "using"): - self.action: str = action - self._guarded = False - - def __enter__(self) -> None: - if self._guarded: - raise BusyResourceError(self.action) - - self._guarded = True - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self._guarded = False diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py deleted file mode 100644 index fe490151..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py +++ /dev/null @@ -1,158 +0,0 @@ -from __future__ import annotations - -import math -from collections.abc import Generator -from contextlib import contextmanager -from types import TracebackType - -from ..abc._tasks import TaskGroup, TaskStatus -from ._eventloop import get_async_backend - - -class _IgnoredTaskStatus(TaskStatus[object]): - def started(self, value: object = None) -> None: - pass - - -TASK_STATUS_IGNORED = _IgnoredTaskStatus() - - -class CancelScope: - """ - Wraps a unit of work that can be made separately cancellable. - - :param deadline: The time (clock value) when this scope is cancelled automatically - :param shield: ``True`` to shield the cancel scope from external cancellation - """ - - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) - - def cancel(self) -> None: - """Cancel this scope immediately.""" - raise NotImplementedError - - @property - def deadline(self) -> float: - """ - The time (clock value) when this scope is cancelled automatically. - - Will be ``float('inf')`` if no timeout has been set. - - """ - raise NotImplementedError - - @deadline.setter - def deadline(self, value: float) -> None: - raise NotImplementedError - - @property - def cancel_called(self) -> bool: - """``True`` if :meth:`cancel` has been called.""" - raise NotImplementedError - - @property - def cancelled_caught(self) -> bool: - """ - ``True`` if this scope suppressed a cancellation exception it itself raised. - - This is typically used to check if any work was interrupted, or to see if the - scope was cancelled due to its deadline being reached. The value will, however, - only be ``True`` if the cancellation was triggered by the scope itself (and not - an outer scope). - - """ - raise NotImplementedError - - @property - def shield(self) -> bool: - """ - ``True`` if this scope is shielded from external cancellation. - - While a scope is shielded, it will not receive cancellations from outside. - - """ - raise NotImplementedError - - @shield.setter - def shield(self, value: bool) -> None: - raise NotImplementedError - - def __enter__(self) -> CancelScope: - raise NotImplementedError - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool: - raise NotImplementedError - - -@contextmanager -def fail_after( - delay: float | None, shield: bool = False -) -> Generator[CancelScope, None, None]: - """ - Create a context manager which raises a :class:`TimeoutError` if does not finish in - time. 
- - :param delay: maximum allowed time (in seconds) before raising the exception, or - ``None`` to disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a context manager that yields a cancel scope - :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] - - """ - current_time = get_async_backend().current_time - deadline = (current_time() + delay) if delay is not None else math.inf - with get_async_backend().create_cancel_scope( - deadline=deadline, shield=shield - ) as cancel_scope: - yield cancel_scope - - if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: - raise TimeoutError - - -def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: - """ - Create a cancel scope with a deadline that expires after the given delay. - - :param delay: maximum allowed time (in seconds) before exiting the context block, or - ``None`` to disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a cancel scope - - """ - deadline = ( - (get_async_backend().current_time() + delay) if delay is not None else math.inf - ) - return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) - - -def current_effective_deadline() -> float: - """ - Return the nearest deadline among all the cancel scopes effective for the current - task. - - :return: a clock value from the event loop's internal clock (or ``float('inf')`` if - there is no deadline in effect, or ``float('-inf')`` if the current scope has - been cancelled) - :rtype: float - - """ - return get_async_backend().current_effective_deadline() - - -def create_task_group() -> TaskGroup: - """ - Create a task group. - - :return: a task group - - """ - return get_async_backend().create_task_group() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py b/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py deleted file mode 100644 index 26d70eca..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py +++ /dev/null @@ -1,616 +0,0 @@ -from __future__ import annotations - -import os -import sys -import tempfile -from collections.abc import Iterable -from io import BytesIO, TextIOWrapper -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - AnyStr, - Generic, - overload, -) - -from .. import to_thread -from .._core._fileio import AsyncFile -from ..lowlevel import checkpoint_if_cancelled - -if TYPE_CHECKING: - from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer - - -class TemporaryFile(Generic[AnyStr]): - """ - An asynchronous temporary file that is automatically created and cleaned up. - - This class provides an asynchronous context manager interface to a temporary file. - The file is created using Python's standard `tempfile.TemporaryFile` function in a - background thread, and is wrapped as an asynchronous file using `AsyncFile`. - - :param mode: The mode in which the file is opened. Defaults to "w+b". - :param buffering: The buffering policy (-1 means the default buffering). - :param encoding: The encoding used to decode or encode the file. Only applicable in - text mode. - :param newline: Controls how universal newlines mode works (only applicable in text - mode). - :param suffix: The suffix for the temporary file name. - :param prefix: The prefix for the temporary file name. - :param dir: The directory in which the temporary file is created. 
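A short sketch of the two timeout helpers defined in this deleted module (assumes anyio is installed; the sleep durations are illustrative):

    import anyio

    async def main() -> None:
        try:
            with anyio.fail_after(1):          # raises TimeoutError on expiry
                await anyio.sleep(5)
        except TimeoutError:
            print("timed out")

        with anyio.move_on_after(1) as scope:  # silently exits the block instead
            await anyio.sleep(5)
        if scope.cancelled_caught:
            print("deadline was hit")

    anyio.run(main)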
- :param errors: The error handling scheme used for encoding/decoding errors. - """ - - _async_file: AsyncFile[AnyStr] - - @overload - def __init__( - self: TemporaryFile[bytes], - mode: OpenBinaryMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - *, - errors: str | None = ..., - ): ... - @overload - def __init__( - self: TemporaryFile[str], - mode: OpenTextMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - *, - errors: str | None = ..., - ): ... - - def __init__( - self, - mode: OpenTextMode | OpenBinaryMode = "w+b", - buffering: int = -1, - encoding: str | None = None, - newline: str | None = None, - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, - *, - errors: str | None = None, - ) -> None: - self.mode = mode - self.buffering = buffering - self.encoding = encoding - self.newline = newline - self.suffix: str | None = suffix - self.prefix: str | None = prefix - self.dir: str | None = dir - self.errors = errors - - async def __aenter__(self) -> AsyncFile[AnyStr]: - fp = await to_thread.run_sync( - lambda: tempfile.TemporaryFile( - self.mode, - self.buffering, - self.encoding, - self.newline, - self.suffix, - self.prefix, - self.dir, - errors=self.errors, - ) - ) - self._async_file = AsyncFile(fp) - return self._async_file - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - traceback: TracebackType | None, - ) -> None: - await self._async_file.aclose() - - -class NamedTemporaryFile(Generic[AnyStr]): - """ - An asynchronous named temporary file that is automatically created and cleaned up. - - This class provides an asynchronous context manager for a temporary file with a - visible name in the file system. It uses Python's standard - :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with - :class:`AsyncFile` for asynchronous operations. - - :param mode: The mode in which the file is opened. Defaults to "w+b". - :param buffering: The buffering policy (-1 means the default buffering). - :param encoding: The encoding used to decode or encode the file. Only applicable in - text mode. - :param newline: Controls how universal newlines mode works (only applicable in text - mode). - :param suffix: The suffix for the temporary file name. - :param prefix: The prefix for the temporary file name. - :param dir: The directory in which the temporary file is created. - :param delete: Whether to delete the file when it is closed. - :param errors: The error handling scheme used for encoding/decoding errors. - :param delete_on_close: (Python 3.12+) Whether to delete the file on close. - """ - - _async_file: AsyncFile[AnyStr] - - @overload - def __init__( - self: NamedTemporaryFile[bytes], - mode: OpenBinaryMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - delete: bool = ..., - *, - errors: str | None = ..., - delete_on_close: bool = ..., - ): ... 
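A minimal sketch of the TemporaryFile wrapper described above (assumes a recent anyio release that exports it from the top-level namespace):

    import anyio

    async def main() -> None:
        async with anyio.TemporaryFile() as f:   # binary "w+b" mode by default
            await f.write(b"scratch data")       # AsyncFile methods are awaitable
            await f.seek(0)
            print(await f.read())

    anyio.run(main)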
- @overload - def __init__( - self: NamedTemporaryFile[str], - mode: OpenTextMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - delete: bool = ..., - *, - errors: str | None = ..., - delete_on_close: bool = ..., - ): ... - - def __init__( - self, - mode: OpenBinaryMode | OpenTextMode = "w+b", - buffering: int = -1, - encoding: str | None = None, - newline: str | None = None, - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, - delete: bool = True, - *, - errors: str | None = None, - delete_on_close: bool = True, - ) -> None: - self._params: dict[str, Any] = { - "mode": mode, - "buffering": buffering, - "encoding": encoding, - "newline": newline, - "suffix": suffix, - "prefix": prefix, - "dir": dir, - "delete": delete, - "errors": errors, - } - if sys.version_info >= (3, 12): - self._params["delete_on_close"] = delete_on_close - - async def __aenter__(self) -> AsyncFile[AnyStr]: - fp = await to_thread.run_sync( - lambda: tempfile.NamedTemporaryFile(**self._params) - ) - self._async_file = AsyncFile(fp) - return self._async_file - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - traceback: TracebackType | None, - ) -> None: - await self._async_file.aclose() - - -class SpooledTemporaryFile(AsyncFile[AnyStr]): - """ - An asynchronous spooled temporary file that starts in memory and is spooled to disk. - - This class provides an asynchronous interface to a spooled temporary file, much like - Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous - write operations and provides a method to force a rollover to disk. - - :param max_size: Maximum size in bytes before the file is rolled over to disk. - :param mode: The mode in which the file is opened. Defaults to "w+b". - :param buffering: The buffering policy (-1 means the default buffering). - :param encoding: The encoding used to decode or encode the file (text mode only). - :param newline: Controls how universal newlines mode works (text mode only). - :param suffix: The suffix for the temporary file name. - :param prefix: The prefix for the temporary file name. - :param dir: The directory in which the temporary file is created. - :param errors: The error handling scheme used for encoding/decoding errors. - """ - - _rolled: bool = False - - @overload - def __init__( - self: SpooledTemporaryFile[bytes], - max_size: int = ..., - mode: OpenBinaryMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - *, - errors: str | None = ..., - ): ... - @overload - def __init__( - self: SpooledTemporaryFile[str], - max_size: int = ..., - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., - *, - errors: str | None = ..., - ): ... 
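Likewise for NamedTemporaryFile, whose backing file has a visible path (assumes a recent anyio; `.wrapped` exposes the underlying stdlib file object):

    import anyio

    async def main() -> None:
        async with anyio.NamedTemporaryFile("w+", suffix=".txt") as f:
            await f.write("hello")
            await f.flush()
            print(f.wrapped.name)   # filesystem path of the temporary file

    anyio.run(main)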
- - def __init__( - self, - max_size: int = 0, - mode: OpenBinaryMode | OpenTextMode = "w+b", - buffering: int = -1, - encoding: str | None = None, - newline: str | None = None, - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, - *, - errors: str | None = None, - ) -> None: - self._tempfile_params: dict[str, Any] = { - "mode": mode, - "buffering": buffering, - "encoding": encoding, - "newline": newline, - "suffix": suffix, - "prefix": prefix, - "dir": dir, - "errors": errors, - } - self._max_size = max_size - if "b" in mode: - super().__init__(BytesIO())  # type: ignore[arg-type] - else: - super().__init__( - TextIOWrapper(  # type: ignore[arg-type] - BytesIO(), - encoding=encoding, - errors=errors, - newline=newline, - write_through=True, - ) - ) - - async def aclose(self) -> None: - if not self._rolled: - self._fp.close() - return - - await super().aclose() - - async def _check(self) -> None: - if self._rolled or self._fp.tell() < self._max_size: - return - - await self.rollover() - - async def rollover(self) -> None: - if self._rolled: - return - - self._rolled = True - buffer = self._fp - buffer.seek(0) - self._fp = await to_thread.run_sync( - lambda: tempfile.TemporaryFile(**self._tempfile_params) - ) - await self.write(buffer.read()) - buffer.close() - - @property - def closed(self) -> bool: - return self._fp.closed - - async def read(self, size: int = -1) -> AnyStr: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.read(size) - - return await super().read(size)  # type: ignore[return-value] - - async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.read1(size) - - return await super().read1(size) - - async def readline(self) -> AnyStr: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.readline() - - return await super().readline()  # type: ignore[return-value] - - async def readlines(self) -> list[AnyStr]: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.readlines() - - return await super().readlines()  # type: ignore[return-value] - - async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.readinto(b) - - return await super().readinto(b) - - async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.readinto1(b) - - return await super().readinto1(b) - - async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.seek(offset, whence) - - return await super().seek(offset, whence) - - async def tell(self) -> int: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.tell() - - return await super().tell() - - async def truncate(self, size: int | None = None) -> int: - if not self._rolled: - await checkpoint_if_cancelled() - return self._fp.truncate(size) - - return await super().truncate(size) - - @overload - async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ... - @overload - async def write(self: SpooledTemporaryFile[str], b: str) -> int: ... - - async def write(self, b: ReadableBuffer | str) -> int: - """ - Asynchronously write data to the spooled temporary file. 
- - If the file has not yet been rolled over, the data is written synchronously, - and a rollover is triggered if the size exceeds the maximum size. - - :param b: The data to write. - :return: The number of bytes written. - :raises RuntimeError: If the underlying file is not initialized. - - """ - if not self._rolled: - await checkpoint_if_cancelled() - result = self._fp.write(b) - await self._check() - return result - - return await super().write(b)  # type: ignore[misc] - - @overload - async def writelines( - self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer] - ) -> None: ... - @overload - async def writelines( - self: SpooledTemporaryFile[str], lines: Iterable[str] - ) -> None: ... - - async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None: - """ - Asynchronously write a list of lines to the spooled temporary file. - - If the file has not yet been rolled over, the lines are written synchronously, - and a rollover is triggered if the size exceeds the maximum size. - - :param lines: An iterable of lines to write. - :raises RuntimeError: If the underlying file is not initialized. - - """ - if not self._rolled: - await checkpoint_if_cancelled() - result = self._fp.writelines(lines) - await self._check() - return result - - return await super().writelines(lines)  # type: ignore[misc] - - -class TemporaryDirectory(Generic[AnyStr]): - """ - An asynchronous temporary directory that is created and cleaned up automatically. - - This class provides an asynchronous context manager for creating a temporary - directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to - perform directory creation and cleanup operations in a background thread. - - :param suffix: Suffix to be added to the temporary directory name. - :param prefix: Prefix to be added to the temporary directory name. - :param dir: The parent directory where the temporary directory is created. - :param ignore_cleanup_errors: Whether to ignore errors during cleanup - (Python 3.10+). - :param delete: Whether to delete the directory upon closing (Python 3.12+). 
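A sketch of the spooling behaviour implemented above: writes stay in memory until `max_size` is exceeded, then roll over to a real temporary file (assumes a recent anyio; the sizes are illustrative):

    import anyio

    async def main() -> None:
        async with anyio.SpooledTemporaryFile(max_size=1024) as f:
            await f.write(b"x" * 100)    # still buffered in memory
            await f.write(b"y" * 2000)   # exceeds max_size, rolls over to disk
            await f.seek(0)
            print(len(await f.read()))

    anyio.run(main)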
- """ - - def __init__( - self, - suffix: AnyStr | None = None, - prefix: AnyStr | None = None, - dir: AnyStr | None = None, - *, - ignore_cleanup_errors: bool = False, - delete: bool = True, - ) -> None: - self.suffix: AnyStr | None = suffix - self.prefix: AnyStr | None = prefix - self.dir: AnyStr | None = dir - self.ignore_cleanup_errors = ignore_cleanup_errors - self.delete = delete - - self._tempdir: tempfile.TemporaryDirectory | None = None - - async def __aenter__(self) -> str: - params: dict[str, Any] = { - "suffix": self.suffix, - "prefix": self.prefix, - "dir": self.dir, - } - if sys.version_info >= (3, 10): - params["ignore_cleanup_errors"] = self.ignore_cleanup_errors - - if sys.version_info >= (3, 12): - params["delete"] = self.delete - - self._tempdir = await to_thread.run_sync( - lambda: tempfile.TemporaryDirectory(**params) - ) - return await to_thread.run_sync(self._tempdir.__enter__) - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - traceback: TracebackType | None, - ) -> None: - if self._tempdir is not None: - await to_thread.run_sync( - self._tempdir.__exit__, exc_type, exc_value, traceback - ) - - async def cleanup(self) -> None: - if self._tempdir is not None: - await to_thread.run_sync(self._tempdir.cleanup) - - -@overload -async def mkstemp( - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, - text: bool = False, -) -> tuple[int, str]: ... - - -@overload -async def mkstemp( - suffix: bytes | None = None, - prefix: bytes | None = None, - dir: bytes | None = None, - text: bool = False, -) -> tuple[int, bytes]: ... - - -async def mkstemp( - suffix: AnyStr | None = None, - prefix: AnyStr | None = None, - dir: AnyStr | None = None, - text: bool = False, -) -> tuple[int, str | bytes]: - """ - Asynchronously create a temporary file and return an OS-level handle and the file - name. - - This function wraps `tempfile.mkstemp` and executes it in a background thread. - - :param suffix: Suffix to be added to the file name. - :param prefix: Prefix to be added to the file name. - :param dir: Directory in which the temporary file is created. - :param text: Whether the file is opened in text mode. - :return: A tuple containing the file descriptor and the file name. - - """ - return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text) - - -@overload -async def mkdtemp( - suffix: str | None = None, - prefix: str | None = None, - dir: str | None = None, -) -> str: ... - - -@overload -async def mkdtemp( - suffix: bytes | None = None, - prefix: bytes | None = None, - dir: bytes | None = None, -) -> bytes: ... - - -async def mkdtemp( - suffix: AnyStr | None = None, - prefix: AnyStr | None = None, - dir: AnyStr | None = None, -) -> str | bytes: - """ - Asynchronously create a temporary directory and return its path. - - This function wraps `tempfile.mkdtemp` and executes it in a background thread. - - :param suffix: Suffix to be added to the directory name. - :param prefix: Prefix to be added to the directory name. - :param dir: Parent directory where the temporary directory is created. - :return: The path of the created temporary directory. - - """ - return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir) - - -async def gettempdir() -> str: - """ - Asynchronously return the name of the directory used for temporary files. - - This function wraps `tempfile.gettempdir` and executes it in a background thread. - - :return: The path of the temporary directory as a string. 
- - """ - return await to_thread.run_sync(tempfile.gettempdir) - - -async def gettempdirb() -> bytes: - """ - Asynchronously return the name of the directory used for temporary files in bytes. - - This function wraps `tempfile.gettempdirb` and executes it in a background thread. - - :return: The path of the temporary directory as bytes. - - """ - return await to_thread.run_sync(tempfile.gettempdirb) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_testing.py b/venv/lib/python3.12/site-packages/anyio/_core/_testing.py deleted file mode 100644 index 9e28b227..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_testing.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import annotations - -from collections.abc import Awaitable, Generator -from typing import Any, cast - -from ._eventloop import get_async_backend - - -class TaskInfo: - """ - Represents an asynchronous task. - - :ivar int id: the unique identifier of the task - :ivar parent_id: the identifier of the parent task, if any - :vartype parent_id: Optional[int] - :ivar str name: the description of the task (if any) - :ivar ~collections.abc.Coroutine coro: the coroutine object of the task - """ - - __slots__ = "_name", "id", "parent_id", "name", "coro" - - def __init__( - self, - id: int, - parent_id: int | None, - name: str | None, - coro: Generator[Any, Any, Any] | Awaitable[Any], - ): - func = get_current_task - self._name = f"{func.__module__}.{func.__qualname__}" - self.id: int = id - self.parent_id: int | None = parent_id - self.name: str | None = name - self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro - - def __eq__(self, other: object) -> bool: - if isinstance(other, TaskInfo): - return self.id == other.id - - return NotImplemented - - def __hash__(self) -> int: - return hash(self.id) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" - - def has_pending_cancellation(self) -> bool: - """ - Return ``True`` if the task has a cancellation pending, ``False`` otherwise. - - """ - return False - - -def get_current_task() -> TaskInfo: - """ - Return the current task. - - :return: a representation of the current task - - """ - return get_async_backend().get_current_task() - - -def get_running_tasks() -> list[TaskInfo]: - """ - Return a list of running tasks in the current event loop. - - :return: a list of task info objects - - """ - return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) - - -async def wait_all_tasks_blocked() -> None: - """Wait until all other tasks are waiting for something.""" - await get_async_backend().wait_all_tasks_blocked() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py deleted file mode 100644 index f358a448..00000000 --- a/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from typing import Any, TypeVar, final, overload - -from ._exceptions import TypedAttributeLookupError - -T_Attr = TypeVar("T_Attr") -T_Default = TypeVar("T_Default") -undefined = object() - - -def typed_attribute() -> Any: - """Return a unique object, used to mark typed attributes.""" - return object() - - -class TypedAttributeSet: - """ - Superclass for typed attribute collections. - - Checks that every public attribute of every subclass has a type annotation. 
- """ - - def __init_subclass__(cls) -> None: - annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) - for attrname in dir(cls): - if not attrname.startswith("_") and attrname not in annotations: - raise TypeError( - f"Attribute {attrname!r} is missing its type annotation" - ) - - super().__init_subclass__() - - -class TypedAttributeProvider: - """Base class for classes that wish to provide typed extra attributes.""" - - @property - def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: - """ - A mapping of the extra attributes to callables that return the corresponding - values. - - If the provider wraps another provider, the attributes from that wrapper should - also be included in the returned mapping (but the wrapper may override the - callables from the wrapped instance). - - """ - return {} - - @overload - def extra(self, attribute: T_Attr) -> T_Attr: ... - - @overload - def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... - - @final - def extra(self, attribute: Any, default: object = undefined) -> object: - """ - extra(attribute, default=undefined) - - Return the value of the given typed extra attribute. - - :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to - look for - :param default: the value that should be returned if no value is found for the - attribute - :raises ~anyio.TypedAttributeLookupError: if the search failed and no default - value was given - - """ - try: - getter = self.extra_attributes[attribute] - except KeyError: - if default is undefined: - raise TypedAttributeLookupError("Attribute not found") from None - else: - return default - - return getter() diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__init__.py b/venv/lib/python3.12/site-packages/anyio/abc/__init__.py deleted file mode 100644 index 3d3b61cc..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -from __future__ import annotations - -from ._eventloop import AsyncBackend as AsyncBackend -from ._resources import AsyncResource as AsyncResource -from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket -from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket -from ._sockets import IPAddressType as IPAddressType -from ._sockets import IPSockAddrType as IPSockAddrType -from ._sockets import SocketAttribute as SocketAttribute -from ._sockets import SocketListener as SocketListener -from ._sockets import SocketStream as SocketStream -from ._sockets import UDPPacketType as UDPPacketType -from ._sockets import UDPSocket as UDPSocket -from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType -from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket -from ._sockets import UNIXSocketStream as UNIXSocketStream -from ._streams import AnyByteReceiveStream as AnyByteReceiveStream -from ._streams import AnyByteSendStream as AnyByteSendStream -from ._streams import AnyByteStream as AnyByteStream -from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream -from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream -from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream -from ._streams import ByteReceiveStream as ByteReceiveStream -from ._streams import ByteSendStream as ByteSendStream -from ._streams import ByteStream as ByteStream -from ._streams import Listener as Listener -from ._streams import ObjectReceiveStream as ObjectReceiveStream -from ._streams import 
ObjectSendStream as ObjectSendStream -from ._streams import ObjectStream as ObjectStream -from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream -from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream -from ._streams import UnreliableObjectStream as UnreliableObjectStream -from ._subprocesses import Process as Process -from ._tasks import TaskGroup as TaskGroup -from ._tasks import TaskStatus as TaskStatus -from ._testing import TestRunner as TestRunner - -# Re-exported here, for backwards compatibility -# isort: off -from .._core._synchronization import ( - CapacityLimiter as CapacityLimiter, - Condition as Condition, - Event as Event, - Lock as Lock, - Semaphore as Semaphore, -) -from .._core._tasks import CancelScope as CancelScope -from ..from_thread import BlockingPortal as BlockingPortal - -# Re-export imports so they look like they live directly in this package -for __value in list(locals().values()): - if getattr(__value, "__module__", "").startswith("anyio.abc."): - __value.__module__ = __name__ - -del __value diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b2ede499..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc deleted file mode 100644 index 146dc82c..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc deleted file mode 100644 index e00e0313..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc deleted file mode 100644 index 1053c357..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc deleted file mode 100644 index 513dc917..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc deleted file mode 100644 index 6f6ebc26..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc deleted file mode 100644 index fc08306e..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc deleted file mode 100644 index bbda3428..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py deleted file mode 100644 index 4cfce836..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py +++ /dev/null @@ -1,376 +0,0 @@ -from __future__ import annotations - -import math -import sys -from abc import ABCMeta, abstractmethod -from collections.abc import AsyncIterator, Awaitable, Callable, Sequence -from contextlib import AbstractContextManager -from os import PathLike -from signal import Signals -from socket import AddressFamily, SocketKind, socket -from typing import ( - IO, - TYPE_CHECKING, - Any, - TypeVar, - Union, - overload, -) - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -if TYPE_CHECKING: - from _typeshed import HasFileno - - from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore - from .._core._tasks import CancelScope - from .._core._testing import TaskInfo - from ..from_thread import BlockingPortal - from ._sockets import ( - ConnectedUDPSocket, - ConnectedUNIXDatagramSocket, - IPSockAddrType, - SocketListener, - SocketStream, - UDPSocket, - UNIXDatagramSocket, - UNIXSocketStream, - ) - from ._subprocesses import Process - from ._tasks import TaskGroup - from ._testing import TestRunner - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") -StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] - - -class AsyncBackend(metaclass=ABCMeta): - @classmethod - @abstractmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - """ - Run the given coroutine function in an asynchronous event loop. - - The current thread must not be already running an event loop. - - :param func: a coroutine function - :param args: positional arguments to ``func`` - :param kwargs: positional arguments to ``func`` - :param options: keyword arguments to call the backend ``run()`` implementation - with - :return: the return value of the coroutine function - """ - - @classmethod - @abstractmethod - def current_token(cls) -> object: - """ - - :return: - """ - - @classmethod - @abstractmethod - def current_time(cls) -> float: - """ - Return the current value of the event loop's internal clock. - - :return: the clock value (seconds) - """ - - @classmethod - @abstractmethod - def cancelled_exception_class(cls) -> type[BaseException]: - """Return the exception class that is raised in a task if it's cancelled.""" - - @classmethod - @abstractmethod - async def checkpoint(cls) -> None: - """ - Check if the task has been cancelled, and allow rescheduling of other tasks. - - This is effectively the same as running :meth:`checkpoint_if_cancelled` and then - :meth:`cancel_shielded_checkpoint`. - """ - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - """ - Check if the current task group has been cancelled. 
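The checkpoint contract described here is what makes long synchronous loops cooperative. A sketch using the public wrapper (assumes anyio is installed; the loop body is illustrative):

    import anyio
    from anyio.lowlevel import checkpoint

    async def crunch() -> None:
        for chunk in range(10_000):
            ...                 # some CPU-bound work on one chunk
            await checkpoint()  # honour cancellation, let other tasks run

    anyio.run(crunch)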
- - This will check if the task has been cancelled, but will not allow other tasks - to be scheduled if not. - - """ - if cls.current_effective_deadline() == -math.inf: - await cls.checkpoint() - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - """ - Allow the rescheduling of other tasks. - - This will give other tasks the opportunity to run, but without checking if the - current task group has been cancelled, unlike with :meth:`checkpoint`. - - """ - with cls.create_cancel_scope(shield=True): - await cls.sleep(0) - - @classmethod - @abstractmethod - async def sleep(cls, delay: float) -> None: - """ - Pause the current task for the specified duration. - - :param delay: the duration, in seconds - """ - - @classmethod - @abstractmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - pass - - @classmethod - @abstractmethod - def current_effective_deadline(cls) -> float: - """ - Return the nearest deadline among all the cancel scopes effective for the - current task. - - :return: - - a clock value from the event loop's internal clock - - ``inf`` if there is no deadline in effect - - ``-inf`` if the current scope has been cancelled - :rtype: float - """ - - @classmethod - @abstractmethod - def create_task_group(cls) -> TaskGroup: - pass - - @classmethod - @abstractmethod - def create_event(cls) -> Event: - pass - - @classmethod - @abstractmethod - def create_lock(cls, *, fast_acquire: bool) -> Lock: - pass - - @classmethod - @abstractmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - pass - - @classmethod - @abstractmethod - def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: - pass - - @classmethod - @abstractmethod - async def run_sync_in_worker_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: CapacityLimiter | None = None, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def check_cancelled(cls) -> None: - pass - - @classmethod - @abstractmethod - def run_async_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def run_sync_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def create_blocking_portal(cls) -> BlockingPortal: - pass - - @classmethod - @abstractmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - pass - - @classmethod - @abstractmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: - pass - - @classmethod - @abstractmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> SocketStream: - pass - - @classmethod - @abstractmethod - async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: - pass - - @classmethod - @abstractmethod - def create_tcp_listener(cls, sock: socket) -> SocketListener: - pass - - @classmethod - @abstractmethod - def create_unix_listener(cls, sock: socket) -> SocketListener: - pass - - @classmethod - @abstractmethod - async def 
create_udp_socket( - cls, - family: AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - pass - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: None - ) -> UNIXDatagramSocket: ... - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: str | bytes - ) -> ConnectedUNIXDatagramSocket: ... - - @classmethod - @abstractmethod - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: str | bytes | None - ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: - pass - - @classmethod - @abstractmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> Sequence[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], - ] - ]: - pass - - @classmethod - @abstractmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - pass - - @classmethod - @abstractmethod - async def wait_readable(cls, obj: HasFileno | int) -> None: - pass - - @classmethod - @abstractmethod - async def wait_writable(cls, obj: HasFileno | int) -> None: - pass - - @classmethod - @abstractmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - pass - - @classmethod - @abstractmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - pass - - @classmethod - @abstractmethod - def get_current_task(cls) -> TaskInfo: - pass - - @classmethod - @abstractmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - pass - - @classmethod - @abstractmethod - async def wait_all_tasks_blocked(cls) -> None: - pass - - @classmethod - @abstractmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - pass diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_resources.py b/venv/lib/python3.12/site-packages/anyio/abc/_resources.py deleted file mode 100644 index 10df115a..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_resources.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from types import TracebackType -from typing import TypeVar - -T = TypeVar("T") - - -class AsyncResource(metaclass=ABCMeta): - """ - Abstract base class for all closeable asynchronous resources. - - Works as an asynchronous context manager which returns the instance itself on enter, - and calls :meth:`aclose` on exit. 
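A minimal sketch of subclassing the AsyncResource ABC described above (the Connection class is hypothetical):

    import anyio
    from anyio.abc import AsyncResource

    class Connection(AsyncResource):      # hypothetical resource type
        async def aclose(self) -> None:
            print("connection closed")

    async def main() -> None:
        async with Connection() as conn:  # __aenter__ returns the instance
            print("using", conn)          # aclose() runs on exit

    anyio.run(main)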
- """ - - __slots__ = () - - async def __aenter__(self: T) -> T: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.aclose() - - @abstractmethod - async def aclose(self) -> None: - """Close the resource.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py deleted file mode 100644 index 1c6a450c..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -import socket -from abc import abstractmethod -from collections.abc import Callable, Collection, Mapping -from contextlib import AsyncExitStack -from io import IOBase -from ipaddress import IPv4Address, IPv6Address -from socket import AddressFamily -from types import TracebackType -from typing import Any, TypeVar, Union - -from .._core._typedattr import ( - TypedAttributeProvider, - TypedAttributeSet, - typed_attribute, -) -from ._streams import ByteStream, Listener, UnreliableObjectStream -from ._tasks import TaskGroup - -IPAddressType = Union[str, IPv4Address, IPv6Address] -IPSockAddrType = tuple[str, int] -SockAddrType = Union[IPSockAddrType, str] -UDPPacketType = tuple[bytes, IPSockAddrType] -UNIXDatagramPacketType = tuple[bytes, str] -T_Retval = TypeVar("T_Retval") - - -class _NullAsyncContextManager: - async def __aenter__(self) -> None: - pass - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return None - - -class SocketAttribute(TypedAttributeSet): - #: the address family of the underlying socket - family: AddressFamily = typed_attribute() - #: the local socket address of the underlying socket - local_address: SockAddrType = typed_attribute() - #: for IP addresses, the local port the underlying socket is bound to - local_port: int = typed_attribute() - #: the underlying stdlib socket object - raw_socket: socket.socket = typed_attribute() - #: the remote address the underlying socket is connected to - remote_address: SockAddrType = typed_attribute() - #: for IP addresses, the remote port the underlying socket is connected to - remote_port: int = typed_attribute() - - -class _SocketProvider(TypedAttributeProvider): - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - from .._core._sockets import convert_ipv6_sockaddr as convert - - attributes: dict[Any, Callable[[], Any]] = { - SocketAttribute.family: lambda: self._raw_socket.family, - SocketAttribute.local_address: lambda: convert( - self._raw_socket.getsockname() - ), - SocketAttribute.raw_socket: lambda: self._raw_socket, - } - try: - peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) - except OSError: - peername = None - - # Provide the remote address for connected sockets - if peername is not None: - attributes[SocketAttribute.remote_address] = lambda: peername - - # Provide local and remote ports for IP based sockets - if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): - attributes[SocketAttribute.local_port] = ( - lambda: self._raw_socket.getsockname()[1] - ) - if peername is not None: - remote_port = peername[1] - attributes[SocketAttribute.remote_port] = lambda: remote_port - - return attributes - - @property - @abstractmethod - def _raw_socket(self) -> socket.socket: - pass - - -class 
SocketStream(ByteStream, _SocketProvider): - """ - Transports bytes over a socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - -class UNIXSocketStream(SocketStream): - @abstractmethod - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - """ - Send file descriptors along with a message to the peer. - - :param message: a non-empty bytestring - :param fds: a collection of files (either numeric file descriptors or open file - or socket objects) - """ - - @abstractmethod - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - """ - Receive file descriptors along with a message from the peer. - - :param msglen: length of the message to expect from the peer - :param maxfds: maximum number of file descriptors to expect from the peer - :return: a tuple of (message, file descriptors) - """ - - -class SocketListener(Listener[SocketStream], _SocketProvider): - """ - Listens to incoming socket connections. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - @abstractmethod - async def accept(self) -> SocketStream: - """Accept an incoming connection.""" - - async def serve( - self, - handler: Callable[[SocketStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - from .. import create_task_group - - async with AsyncExitStack() as stack: - if task_group is None: - task_group = await stack.enter_async_context(create_task_group()) - - while True: - stream = await self.accept() - task_group.start_soon(handler, stream) - - -class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): - """ - Represents an unconnected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - async def sendto(self, data: bytes, host: str, port: int) -> None: - """ - Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). - - """ - return await self.send((data, (host, port))) - - -class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): - """ - Represents an connected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - -class UNIXDatagramSocket( - UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider -): - """ - Represents an unconnected Unix datagram socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - async def sendto(self, data: bytes, path: str) -> None: - """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" - return await self.send((data, path)) - - -class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): - """ - Represents a connected Unix datagram socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. 
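A sketch of serving connections with these socket abstractions (assumes anyio is installed; the port and echo handler are illustrative, and the serve loop runs until cancelled):

    import anyio
    from anyio.abc import SocketStream

    async def handle(stream: SocketStream) -> None:
        async for chunk in stream:       # iterate incoming byte chunks
            await stream.send(chunk)     # echo them back

    async def main() -> None:
        listener = await anyio.create_tcp_listener(local_port=9999)
        await listener.serve(handle)     # spawns one task per connection

    anyio.run(main)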
- """ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_streams.py b/venv/lib/python3.12/site-packages/anyio/abc/_streams.py deleted file mode 100644 index f11d97b5..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_streams.py +++ /dev/null @@ -1,203 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from collections.abc import Callable -from typing import Any, Generic, TypeVar, Union - -from .._core._exceptions import EndOfStream -from .._core._typedattr import TypedAttributeProvider -from ._resources import AsyncResource -from ._tasks import TaskGroup - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class UnreliableObjectReceiveStream( - Generic[T_co], AsyncResource, TypedAttributeProvider -): - """ - An interface for receiving objects. - - This interface makes no guarantees that the received messages arrive in the order in - which they were sent, or that no messages are missed. - - Asynchronously iterating over objects of this type will yield objects matching the - given type parameter. - """ - - def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: - return self - - async def __anext__(self) -> T_co: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self) -> T_co: - """ - Receive the next item. - - :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly - closed - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectSendStream( - Generic[T_contra], AsyncResource, TypedAttributeProvider -): - """ - An interface for sending objects. - - This interface makes no guarantees that the messages sent will reach the - recipient(s) in the same order in which they were sent, or at all. - """ - - @abstractmethod - async def send(self, item: T_contra) -> None: - """ - Send an item to the peer(s). - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if the send stream has been explicitly - closed - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectStream( - UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] -): - """ - A bidirectional message stream which does not guarantee the order or reliability of - message delivery. - """ - - -class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): - """ - A receive message stream which guarantees that messages are received in the same - order in which they were sent, and that no messages are missed. - """ - - -class ObjectSendStream(UnreliableObjectSendStream[T_contra]): - """ - A send message stream which guarantees that messages are delivered in the same order - in which they were sent, without missing any messages in the middle. - """ - - -class ObjectStream( - ObjectReceiveStream[T_Item], - ObjectSendStream[T_Item], - UnreliableObjectStream[T_Item], -): - """ - A bidirectional message stream which guarantees the order and reliability of message - delivery. - """ - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this - method. This method is idempotent (does nothing on successive calls). 
- """ - - -class ByteReceiveStream(AsyncResource, TypedAttributeProvider): - """ - An interface for receiving bytes from a single peer. - - Iterating this byte stream will yield a byte string of arbitrary length, but no more - than 65536 bytes. - """ - - def __aiter__(self) -> ByteReceiveStream: - return self - - async def __anext__(self) -> bytes: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self, max_bytes: int = 65536) -> bytes: - """ - Receive at most ``max_bytes`` bytes from the peer. - - .. note:: Implementers of this interface should not return an empty - :class:`bytes` object, and users should ignore them. - - :param max_bytes: maximum number of bytes to receive - :return: the received bytes - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - """ - - -class ByteSendStream(AsyncResource, TypedAttributeProvider): - """An interface for sending bytes to a single peer.""" - - @abstractmethod - async def send(self, item: bytes) -> None: - """ - Send the given bytes to the peer. - - :param item: the bytes to send - """ - - -class ByteStream(ByteReceiveStream, ByteSendStream): - """A bidirectional byte stream.""" - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this - method. This method is idempotent (does nothing on successive calls). - """ - - -#: Type alias for all unreliable bytes-oriented receive streams. -AnyUnreliableByteReceiveStream = Union[ - UnreliableObjectReceiveStream[bytes], ByteReceiveStream -] -#: Type alias for all unreliable bytes-oriented send streams. -AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] -#: Type alias for all unreliable bytes-oriented streams. -AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] -#: Type alias for all bytes-oriented receive streams. -AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] -#: Type alias for all bytes-oriented send streams. -AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] -#: Type alias for all bytes-oriented streams. -AnyByteStream = Union[ObjectStream[bytes], ByteStream] - - -class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): - """An interface for objects that let you accept incoming connections.""" - - @abstractmethod - async def serve( - self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None - ) -> None: - """ - Accept incoming connections as they come in and start tasks to handle them. 
- - :param handler: a callable that will be used to handle each accepted connection - :param task_group: the task group that will be used to start tasks for handling - each accepted connection (if omitted, an ad-hoc task group will be created) - """ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py deleted file mode 100644 index ce0564ce..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from signal import Signals - -from ._resources import AsyncResource -from ._streams import ByteReceiveStream, ByteSendStream - - -class Process(AsyncResource): - """An asynchronous version of :class:`subprocess.Popen`.""" - - @abstractmethod - async def wait(self) -> int: - """ - Wait until the process exits. - - :return: the exit code of the process - """ - - @abstractmethod - def terminate(self) -> None: - """ - Terminates the process, gracefully if possible. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGTERM`` to the process. - - .. seealso:: :meth:`subprocess.Popen.terminate` - """ - - @abstractmethod - def kill(self) -> None: - """ - Kills the process. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGKILL`` to the process. - - .. seealso:: :meth:`subprocess.Popen.kill` - """ - - @abstractmethod - def send_signal(self, signal: Signals) -> None: - """ - Send a signal to the subprocess. - - .. seealso:: :meth:`subprocess.Popen.send_signal` - - :param signal: the signal number (e.g. :data:`signal.SIGHUP`) - """ - - @property - @abstractmethod - def pid(self) -> int: - """The process ID of the process.""" - - @property - @abstractmethod - def returncode(self) -> int | None: - """ - The return code of the process. If the process has not yet terminated, this will - be ``None``. - """ - - @property - @abstractmethod - def stdin(self) -> ByteSendStream | None: - """The stream for the standard input of the process.""" - - @property - @abstractmethod - def stdout(self) -> ByteReceiveStream | None: - """The stream for the standard output of the process.""" - - @property - @abstractmethod - def stderr(self) -> ByteReceiveStream | None: - """The stream for the standard error output of the process.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py deleted file mode 100644 index f6e5c40c..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py +++ /dev/null @@ -1,101 +0,0 @@ -from __future__ import annotations - -import sys -from abc import ABCMeta, abstractmethod -from collections.abc import Awaitable, Callable -from types import TracebackType -from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if TYPE_CHECKING: - from .._core._tasks import CancelScope - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) -PosArgsT = TypeVarTuple("PosArgsT") - - -class TaskStatus(Protocol[T_contra]): - @overload - def started(self: TaskStatus[None]) -> None: ... - - @overload - def started(self, value: T_contra) -> None: ... - - def started(self, value: T_contra | None = None) -> None: - """ - Signal that the task has started. 
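``task_status.started()`` is the handshake that ``TaskGroup.start()`` waits for; a minimal sketch, where the bound port is invented for illustration::

    import anyio
    from anyio.abc import TaskStatus

    async def server(*, task_status: TaskStatus = anyio.TASK_STATUS_IGNORED) -> None:
        port = 25000  # pretend a socket was bound here
        task_status.started(port)  # unblocks the TaskGroup.start() call below
        await anyio.sleep_forever()

    async def main() -> None:
        async with anyio.create_task_group() as tg:
            port = await tg.start(server)
            print("ready on port", port)
            tg.cancel_scope.cancel()

    anyio.run(main)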
- - :param value: object passed back to the starter of the task - """ - - -class TaskGroup(metaclass=ABCMeta): - """ - Groups several asynchronous tasks together. - - :ivar cancel_scope: the cancel scope inherited by all child tasks - :vartype cancel_scope: CancelScope - - .. note:: On asyncio, support for eager task factories is considered to be - **experimental**. In particular, they don't follow the usual semantics of new - tasks being scheduled on the next iteration of the event loop, and may thus - cause unexpected behavior in code that wasn't written with such semantics in - mind. - """ - - cancel_scope: CancelScope - - @abstractmethod - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - """ - Start a new task in this task group. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - - .. versionadded:: 3.0 - """ - - @abstractmethod - async def start( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> Any: - """ - Start a new task and wait until it signals for readiness. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - :return: the value passed to ``task_status.started()`` - :raises RuntimeError: if the task finishes without calling - ``task_status.started()`` - - .. versionadded:: 3.0 - """ - - @abstractmethod - async def __aenter__(self) -> TaskGroup: - """Enter the task group context and allow starting new tasks.""" - - @abstractmethod - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - """Exit the task group context waiting for all tasks to finish.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_testing.py b/venv/lib/python3.12/site-packages/anyio/abc/_testing.py deleted file mode 100644 index 7c50ed76..00000000 --- a/venv/lib/python3.12/site-packages/anyio/abc/_testing.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -import types -from abc import ABCMeta, abstractmethod -from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable -from typing import Any, TypeVar - -_T = TypeVar("_T") - - -class TestRunner(metaclass=ABCMeta): - """ - Encapsulates a running event loop. Every call made through this object will use the - same event loop. - """ - - def __enter__(self) -> TestRunner: - return self - - @abstractmethod - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: types.TracebackType | None, - ) -> bool | None: ... - - @abstractmethod - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[_T, Any]], - kwargs: dict[str, Any], - ) -> Iterable[_T]: - """ - Run an async generator fixture. - - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: an iterator yielding the value yielded from the async generator - """ - - @abstractmethod - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, _T]], - kwargs: dict[str, Any], - ) -> _T: - """ - Run an async fixture. 
- - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: the return value of the fixture function - """ - - @abstractmethod - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - """ - Run an async test function. - - :param test_func: the test function - :param kwargs: keyword arguments to call the test function with - """ diff --git a/venv/lib/python3.12/site-packages/anyio/from_thread.py b/venv/lib/python3.12/site-packages/anyio/from_thread.py deleted file mode 100644 index 61790973..00000000 --- a/venv/lib/python3.12/site-packages/anyio/from_thread.py +++ /dev/null @@ -1,527 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Awaitable, Callable, Generator -from concurrent.futures import Future -from contextlib import ( - AbstractAsyncContextManager, - AbstractContextManager, - contextmanager, -) -from dataclasses import dataclass, field -from inspect import isawaitable -from threading import Lock, Thread, get_ident -from types import TracebackType -from typing import ( - Any, - Generic, - TypeVar, - cast, - overload, -) - -from ._core import _eventloop -from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals -from ._core._synchronization import Event -from ._core._tasks import CancelScope, create_task_group -from .abc import AsyncBackend -from .abc._tasks import TaskStatus - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -T_co = TypeVar("T_co", covariant=True) -PosArgsT = TypeVarTuple("PosArgsT") - - -def run( - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] -) -> T_Retval: - """ - Call a coroutine function from a worker thread. - - :param func: a coroutine function - :param args: positional arguments for the callable - :return: the return value of the coroutine function - - """ - try: - async_backend = threadlocals.current_async_backend - token = threadlocals.current_token - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - return async_backend.run_async_from_thread(func, args, token=token) - - -def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] -) -> T_Retval: - """ - Call a function in the event loop thread from a worker thread. 
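Both ``run()`` and ``run_sync()`` above only work from a worker thread that anyio itself spawned; a minimal sketch of hopping back into the event loop from such a thread::

    import anyio
    from anyio import from_thread, to_thread

    def blocking_work() -> str:
        # This runs in an AnyIO worker thread, so both helpers are available
        from_thread.run(anyio.sleep, 0.1)               # await a coroutine function
        return from_thread.run_sync(str.upper, "done")  # call a plain function

    async def main() -> None:
        print(await to_thread.run_sync(blocking_work))

    anyio.run(main)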
- - :param func: a callable - :param args: positional arguments for the callable - :return: the return value of the callable - - """ - try: - async_backend = threadlocals.current_async_backend - token = threadlocals.current_token - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - return async_backend.run_sync_from_thread(func, args, token=token) - - -class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): - _enter_future: Future[T_co] - _exit_future: Future[bool | None] - _exit_event: Event - _exit_exc_info: tuple[ - type[BaseException] | None, BaseException | None, TracebackType | None - ] = (None, None, None) - - def __init__( - self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal - ): - self._async_cm = async_cm - self._portal = portal - - async def run_async_cm(self) -> bool | None: - try: - self._exit_event = Event() - value = await self._async_cm.__aenter__() - except BaseException as exc: - self._enter_future.set_exception(exc) - raise - else: - self._enter_future.set_result(value) - - try: - # Wait for the sync context manager to exit. - # This next statement can raise `get_cancelled_exc_class()` if - # something went wrong in a task group in this async context - # manager. - await self._exit_event.wait() - finally: - # In case of cancellation, it could be that we end up here before - # `_BlockingAsyncContextManager.__exit__` is called, and an - # `_exit_exc_info` has been set. - result = await self._async_cm.__aexit__(*self._exit_exc_info) - return result - - def __enter__(self) -> T_co: - self._enter_future = Future() - self._exit_future = self._portal.start_task_soon(self.run_async_cm) - return self._enter_future.result() - - def __exit__( - self, - __exc_type: type[BaseException] | None, - __exc_value: BaseException | None, - __traceback: TracebackType | None, - ) -> bool | None: - self._exit_exc_info = __exc_type, __exc_value, __traceback - self._portal.call(self._exit_event.set) - return self._exit_future.result() - - -class _BlockingPortalTaskStatus(TaskStatus): - def __init__(self, future: Future): - self._future = future - - def started(self, value: object = None) -> None: - self._future.set_result(value) - - -class BlockingPortal: - """An object that lets external threads run code in an asynchronous event loop.""" - - def __new__(cls) -> BlockingPortal: - return get_async_backend().create_blocking_portal() - - def __init__(self) -> None: - self._event_loop_thread_id: int | None = get_ident() - self._stop_event = Event() - self._task_group = create_task_group() - self._cancelled_exc_class = get_cancelled_exc_class() - - async def __aenter__(self) -> BlockingPortal: - await self._task_group.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - await self.stop() - return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) - - def _check_running(self) -> None: - if self._event_loop_thread_id is None: - raise RuntimeError("This portal is not running") - if self._event_loop_thread_id == get_ident(): - raise RuntimeError( - "This method cannot be called from the event loop thread" - ) - - async def sleep_until_stopped(self) -> None: - """Sleep until :meth:`stop` is called.""" - await self._stop_event.wait() - - async def stop(self, cancel_remaining: bool = False) -> None: - """ - Signal the portal to shut down. 
- - This marks the portal as no longer accepting new calls and exits from - :meth:`sleep_until_stopped`. - - :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` - to let them finish before returning - - """ - self._event_loop_thread_id = None - self._stop_event.set() - if cancel_remaining: - self._task_group.cancel_scope.cancel() - - async def _call_func( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - future: Future[T_Retval], - ) -> None: - def callback(f: Future[T_Retval]) -> None: - if f.cancelled() and self._event_loop_thread_id not in ( - None, - get_ident(), - ): - self.call(scope.cancel) - - try: - retval_or_awaitable = func(*args, **kwargs) - if isawaitable(retval_or_awaitable): - with CancelScope() as scope: - if future.cancelled(): - scope.cancel() - else: - future.add_done_callback(callback) - - retval = await retval_or_awaitable - else: - retval = retval_or_awaitable - except self._cancelled_exc_class: - future.cancel() - future.set_running_or_notify_cancel() - except BaseException as exc: - if not future.cancelled(): - future.set_exception(exc) - - # Let base exceptions fall through - if not isinstance(exc, Exception): - raise - else: - if not future.cancelled(): - future.set_result(retval) - finally: - scope = None # type: ignore[assignment] - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - """ - Spawn a new task using the given callable. - - Implementers must ensure that the future is resolved when the task finishes. - - :param func: a callable - :param args: positional arguments to be passed to the callable - :param kwargs: keyword arguments to be passed to the callable - :param name: name of the task (will be coerced to a string if not ``None``) - :param future: a future that will resolve to the return value of the callable, - or the exception raised during its execution - - """ - raise NotImplementedError - - @overload - def call( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - ) -> T_Retval: ... - - @overload - def call( - self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] - ) -> T_Retval: ... - - def call( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - *args: Unpack[PosArgsT], - ) -> T_Retval: - """ - Call the given function in the event loop thread. - - If the callable returns a coroutine object, it is awaited on. - - :param func: any callable - :raises RuntimeError: if the portal is not running or if this method is called - from within the event loop thread - - """ - return cast(T_Retval, self.start_task_soon(func, *args).result()) - - @overload - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: ... - - @overload - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: ... - - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: - """ - Start a task in the portal's task group. 
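In practice ``call()`` and ``start_task_soon()`` are reached through ``start_blocking_portal()``; a minimal sketch of driving a coroutine from plain synchronous code::

    import anyio
    from anyio.from_thread import start_blocking_portal

    async def double(x: int) -> int:
        await anyio.sleep(0.1)
        return 2 * x

    with start_blocking_portal() as portal:
        print(portal.call(double, 21))              # blocks until the coroutine returns
        future = portal.start_task_soon(double, 5)  # a concurrent.futures.Future
        print(future.result())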
- - The task will be run inside a cancel scope which can be cancelled by cancelling - the returned future. - - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a future that resolves with the return value of the callable if the - task completes successfully, or with the exception raised in the task - :raises RuntimeError: if the portal is not running or if this method is called - from within the event loop thread - :rtype: concurrent.futures.Future[T_Retval] - - .. versionadded:: 3.0 - - """ - self._check_running() - f: Future[T_Retval] = Future() - self._spawn_task_from_thread(func, args, {}, name, f) - return f - - def start_task( - self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, - name: object = None, - ) -> tuple[Future[T_Retval], Any]: - """ - Start a task in the portal's task group and wait until it signals for readiness. - - This method works the same way as :meth:`.abc.TaskGroup.start`. - - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a tuple of (future, task_status_value) where the ``task_status_value`` - is the value passed to ``task_status.started()`` from within the target - function - :rtype: tuple[concurrent.futures.Future[T_Retval], Any] - - .. versionadded:: 3.0 - - """ - - def task_done(future: Future[T_Retval]) -> None: - if not task_status_future.done(): - if future.cancelled(): - task_status_future.cancel() - elif future.exception(): - task_status_future.set_exception(future.exception()) - else: - exc = RuntimeError( - "Task exited without calling task_status.started()" - ) - task_status_future.set_exception(exc) - - self._check_running() - task_status_future: Future = Future() - task_status = _BlockingPortalTaskStatus(task_status_future) - f: Future = Future() - f.add_done_callback(task_done) - self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) - return f, task_status_future.result() - - def wrap_async_context_manager( - self, cm: AbstractAsyncContextManager[T_co] - ) -> AbstractContextManager[T_co]: - """ - Wrap an async context manager as a synchronous context manager via this portal. - - Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping - in the middle until the synchronous context manager exits. - - :param cm: an asynchronous context manager - :return: a synchronous context manager - - .. versionadded:: 2.1 - - """ - return _BlockingAsyncContextManager(cm, self) - - -@dataclass -class BlockingPortalProvider: - """ - A manager for a blocking portal. Used as a context manager. The first thread to - enter this context manager causes a blocking portal to be started with the specific - parameters, and the last thread to exit causes the portal to be shut down. Thus, - there will be exactly one blocking portal running in this context as long as at - least one thread has entered this context manager. - - The parameters are the same as for :func:`~anyio.run`. - - :param backend: name of the backend - :param backend_options: backend options - - .. 
versionadded:: 4.4 - """ - - backend: str = "asyncio" - backend_options: dict[str, Any] | None = None - _lock: Lock = field(init=False, default_factory=Lock) - _leases: int = field(init=False, default=0) - _portal: BlockingPortal = field(init=False) - _portal_cm: AbstractContextManager[BlockingPortal] | None = field( - init=False, default=None - ) - - def __enter__(self) -> BlockingPortal: - with self._lock: - if self._portal_cm is None: - self._portal_cm = start_blocking_portal( - self.backend, self.backend_options - ) - self._portal = self._portal_cm.__enter__() - - self._leases += 1 - return self._portal - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - portal_cm: AbstractContextManager[BlockingPortal] | None = None - with self._lock: - assert self._portal_cm - assert self._leases > 0 - self._leases -= 1 - if not self._leases: - portal_cm = self._portal_cm - self._portal_cm = None - del self._portal - - if portal_cm: - portal_cm.__exit__(None, None, None) - - -@contextmanager -def start_blocking_portal( - backend: str = "asyncio", backend_options: dict[str, Any] | None = None -) -> Generator[BlockingPortal, Any, None]: - """ - Start a new event loop in a new thread and run a blocking portal in its main task. - - The parameters are the same as for :func:`~anyio.run`. - - :param backend: name of the backend - :param backend_options: backend options - :return: a context manager that yields a blocking portal - - .. versionchanged:: 3.0 - Usage as a context manager is now required. - - """ - - async def run_portal() -> None: - async with BlockingPortal() as portal_: - future.set_result(portal_) - await portal_.sleep_until_stopped() - - def run_blocking_portal() -> None: - if future.set_running_or_notify_cancel(): - try: - _eventloop.run( - run_portal, backend=backend, backend_options=backend_options - ) - except BaseException as exc: - if not future.done(): - future.set_exception(exc) - - future: Future[BlockingPortal] = Future() - thread = Thread(target=run_blocking_portal, daemon=True) - thread.start() - try: - cancel_remaining_tasks = False - portal = future.result() - try: - yield portal - except BaseException: - cancel_remaining_tasks = True - raise - finally: - try: - portal.call(portal.stop, cancel_remaining_tasks) - except RuntimeError: - pass - finally: - thread.join() - - -def check_cancelled() -> None: - """ - Check if the cancel scope of the host task's running the current worker thread has - been cancelled. - - If the host task's current cancel scope has indeed been cancelled, the - backend-specific cancellation exception will be raised. 
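``check_cancelled()`` lets long-running synchronous code cooperate with host-side cancellation; a minimal sketch, assuming the worker is spawned via ``to_thread.run_sync()`` and the loop body stands in for real blocking work::

    import anyio
    from anyio import from_thread, to_thread

    def crunch() -> None:
        for _ in range(1_000_000):
            from_thread.check_cancelled()  # raises once the host scope is cancelled
            # ... a slice of blocking work would go here ...

    async def main() -> None:
        with anyio.move_on_after(0.5):
            await to_thread.run_sync(crunch)

    anyio.run(main)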
- - :raises RuntimeError: if the current thread was not spawned by - :func:`.to_thread.run_sync` - - """ - try: - async_backend: AsyncBackend = threadlocals.current_async_backend - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - async_backend.check_cancelled() diff --git a/venv/lib/python3.12/site-packages/anyio/lowlevel.py b/venv/lib/python3.12/site-packages/anyio/lowlevel.py deleted file mode 100644 index 14c7668c..00000000 --- a/venv/lib/python3.12/site-packages/anyio/lowlevel.py +++ /dev/null @@ -1,161 +0,0 @@ -from __future__ import annotations - -import enum -from dataclasses import dataclass -from typing import Any, Generic, Literal, TypeVar, overload -from weakref import WeakKeyDictionary - -from ._core._eventloop import get_async_backend - -T = TypeVar("T") -D = TypeVar("D") - - -async def checkpoint() -> None: - """ - Check for cancellation and allow the scheduler to switch to another task. - - Equivalent to (but more efficient than):: - - await checkpoint_if_cancelled() - await cancel_shielded_checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_async_backend().checkpoint() - - -async def checkpoint_if_cancelled() -> None: - """ - Enter a checkpoint if the enclosing cancel scope has been cancelled. - - This does not allow the scheduler to switch to a different task. - - .. versionadded:: 3.0 - - """ - await get_async_backend().checkpoint_if_cancelled() - - -async def cancel_shielded_checkpoint() -> None: - """ - Allow the scheduler to switch to another task but without checking for cancellation. - - Equivalent to (but potentially more efficient than):: - - with CancelScope(shield=True): - await checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_async_backend().cancel_shielded_checkpoint() - - -def current_token() -> object: - """ - Return a backend specific token object that can be used to get back to the event - loop. - - """ - return get_async_backend().current_token() - - -_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() -_token_wrappers: dict[Any, _TokenWrapper] = {} - - -@dataclass(frozen=True) -class _TokenWrapper: - __slots__ = "_token", "__weakref__" - _token: object - - -class _NoValueSet(enum.Enum): - NO_VALUE_SET = enum.auto() - - -class RunvarToken(Generic[T]): - __slots__ = "_var", "_value", "_redeemed" - - def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): - self._var = var - self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value - self._redeemed = False - - -class RunVar(Generic[T]): - """ - Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. - """ - - __slots__ = "_name", "_default" - - NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET - - _token_wrappers: set[_TokenWrapper] = set() - - def __init__( - self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET - ): - self._name = name - self._default = default - - @property - def _current_vars(self) -> dict[str, T]: - token = current_token() - try: - return _run_vars[token] - except KeyError: - run_vars = _run_vars[token] = {} - return run_vars - - @overload - def get(self, default: D) -> T | D: ... - - @overload - def get(self) -> T: ... 
- - def get( - self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET - ) -> T | D: - try: - return self._current_vars[self._name] - except KeyError: - if default is not RunVar.NO_VALUE_SET: - return default - elif self._default is not RunVar.NO_VALUE_SET: - return self._default - - raise LookupError( - f'Run variable "{self._name}" has no value and no default set' - ) - - def set(self, value: T) -> RunvarToken[T]: - current_vars = self._current_vars - token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) - current_vars[self._name] = value - return token - - def reset(self, token: RunvarToken[T]) -> None: - if token._var is not self: - raise ValueError("This token does not belong to this RunVar") - - if token._redeemed: - raise ValueError("This token has already been used") - - if token._value is _NoValueSet.NO_VALUE_SET: - try: - del self._current_vars[self._name] - except KeyError: - pass - else: - self._current_vars[self._name] = token._value - - token._redeemed = True - - def __repr__(self) -> str: - return f"" diff --git a/venv/lib/python3.12/site-packages/anyio/py.typed b/venv/lib/python3.12/site-packages/anyio/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py b/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py deleted file mode 100644 index 21e4ab22..00000000 --- a/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py +++ /dev/null @@ -1,272 +0,0 @@ -from __future__ import annotations - -import socket -import sys -from collections.abc import Callable, Generator, Iterator -from contextlib import ExitStack, contextmanager -from inspect import isasyncgenfunction, iscoroutinefunction, ismethod -from typing import Any, cast - -import pytest -import sniffio -from _pytest.fixtures import SubRequest -from _pytest.outcomes import Exit - -from ._core._eventloop import get_all_backends, get_async_backend -from ._core._exceptions import iterate_exceptions -from .abc import TestRunner - -if sys.version_info < (3, 11): - from exceptiongroup import ExceptionGroup - -_current_runner: TestRunner | None = None -_runner_stack: ExitStack | None = None -_runner_leases = 0 - - -def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: - if isinstance(backend, str): - return backend, {} - elif isinstance(backend, tuple) and len(backend) == 2: - if isinstance(backend[0], str) and isinstance(backend[1], dict): - return cast(tuple[str, dict[str, Any]], backend) - - raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") - - -@contextmanager -def get_runner( - backend_name: str, backend_options: dict[str, Any] -) -> Iterator[TestRunner]: - global _current_runner, _runner_leases, _runner_stack - if _current_runner is None: - asynclib = get_async_backend(backend_name) - _runner_stack = ExitStack() - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the - # async library - token = sniffio.current_async_library_cvar.set(backend_name) - _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) - - backend_options = backend_options or {} - _current_runner = _runner_stack.enter_context( - asynclib.create_test_runner(backend_options) - ) - - _runner_leases += 1 - try: - yield _current_runner - finally: - _runner_leases -= 1 - if not _runner_leases: - assert _runner_stack is not None - _runner_stack.close() - _runner_stack = _current_runner = None - - -def 
pytest_configure(config: Any) -> None: - config.addinivalue_line( - "markers", - "anyio: mark the (coroutine function) test to be run asynchronously via anyio.", - ) - - -@pytest.hookimpl(hookwrapper=True) -def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]: - def wrapper( - *args: Any, anyio_backend: Any, request: SubRequest, **kwargs: Any - ) -> Any: - # Rebind any fixture methods to the request instance - if ( - request.instance - and ismethod(func) - and type(func.__self__) is type(request.instance) - ): - local_func = func.__func__.__get__(request.instance) - else: - local_func = func - - backend_name, backend_options = extract_backend_and_options(anyio_backend) - if has_backend_arg: - kwargs["anyio_backend"] = anyio_backend - - if has_request_arg: - kwargs["request"] = request - - with get_runner(backend_name, backend_options) as runner: - if isasyncgenfunction(local_func): - yield from runner.run_asyncgen_fixture(local_func, kwargs) - else: - yield runner.run_fixture(local_func, kwargs) - - # Only apply this to coroutine functions and async generator functions in requests - # that involve the anyio_backend fixture - func = fixturedef.func - if isasyncgenfunction(func) or iscoroutinefunction(func): - if "anyio_backend" in request.fixturenames: - fixturedef.func = wrapper - original_argname = fixturedef.argnames - - if not (has_backend_arg := "anyio_backend" in fixturedef.argnames): - fixturedef.argnames += ("anyio_backend",) - - if not (has_request_arg := "request" in fixturedef.argnames): - fixturedef.argnames += ("request",) - - try: - return (yield) - finally: - fixturedef.func = func - fixturedef.argnames = original_argname - - return (yield) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: - if collector.istestfunction(obj, name): - inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj - if iscoroutinefunction(inner_func): - marker = collector.get_closest_marker("anyio") - own_markers = getattr(obj, "pytestmark", ()) - if marker or any(marker.name == "anyio" for marker in own_markers): - pytest.mark.usefixtures("anyio_backend")(obj) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: - def run_with_hypothesis(**kwargs: Any) -> None: - with get_runner(backend_name, backend_options) as runner: - runner.run_test(original_func, kwargs) - - backend = pyfuncitem.funcargs.get("anyio_backend") - if backend: - backend_name, backend_options = extract_backend_and_options(backend) - - if hasattr(pyfuncitem.obj, "hypothesis"): - # Wrap the inner test function unless it's already wrapped - original_func = pyfuncitem.obj.hypothesis.inner_test - if original_func.__qualname__ != run_with_hypothesis.__qualname__: - if iscoroutinefunction(original_func): - pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis - - return None - - if iscoroutinefunction(pyfuncitem.obj): - funcargs = pyfuncitem.funcargs - testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} - with get_runner(backend_name, backend_options) as runner: - try: - runner.run_test(pyfuncitem.obj, testargs) - except ExceptionGroup as excgrp: - for exc in iterate_exceptions(excgrp): - if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): - raise exc from excgrp - - raise - - return True - - return None - - -@pytest.fixture(scope="module", params=get_all_backends()) -def anyio_backend(request: Any) -> Any: - return request.param - - -@pytest.fixture -def 
anyio_backend_name(anyio_backend: Any) -> str: - if isinstance(anyio_backend, str): - return anyio_backend - else: - return anyio_backend[0] - - -@pytest.fixture -def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: - if isinstance(anyio_backend, str): - return {} - else: - return anyio_backend[1] - - -class FreePortFactory: - """ - Manages port generation based on specified socket kind, ensuring no duplicate - ports are generated. - - This class provides functionality for generating available free ports on the - system. It is initialized with a specific socket kind and can generate ports - for given address families while avoiding reuse of previously generated ports. - - Users should not instantiate this class directly, but use the - ``free_tcp_port_factory`` and ``free_udp_port_factory`` fixtures instead. For simple - uses cases, ``free_tcp_port`` and ``free_udp_port`` can be used instead. - """ - - def __init__(self, kind: socket.SocketKind) -> None: - self._kind = kind - self._generated = set[int]() - - @property - def kind(self) -> socket.SocketKind: - """ - The type of socket connection (e.g., :data:`~socket.SOCK_STREAM` or - :data:`~socket.SOCK_DGRAM`) used to bind for checking port availability - - """ - return self._kind - - def __call__(self, family: socket.AddressFamily | None = None) -> int: - """ - Return an unbound port for the given address family. - - :param family: if omitted, both IPv4 and IPv6 addresses will be tried - :return: a port number - - """ - if family is not None: - families = [family] - else: - families = [socket.AF_INET] - if socket.has_ipv6: - families.append(socket.AF_INET6) - - while True: - port = 0 - with ExitStack() as stack: - for family in families: - sock = stack.enter_context(socket.socket(family, self._kind)) - addr = "::1" if family == socket.AF_INET6 else "127.0.0.1" - try: - sock.bind((addr, port)) - except OSError: - break - - if not port: - port = sock.getsockname()[1] - else: - if port not in self._generated: - self._generated.add(port) - return port - - -@pytest.fixture(scope="session") -def free_tcp_port_factory() -> FreePortFactory: - return FreePortFactory(socket.SOCK_STREAM) - - -@pytest.fixture(scope="session") -def free_udp_port_factory() -> FreePortFactory: - return FreePortFactory(socket.SOCK_DGRAM) - - -@pytest.fixture -def free_tcp_port(free_tcp_port_factory: Callable[[], int]) -> int: - return free_tcp_port_factory() - - -@pytest.fixture -def free_udp_port(free_udp_port_factory: Callable[[], int]) -> int: - return free_udp_port_factory() diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__init__.py b/venv/lib/python3.12/site-packages/anyio/streams/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index be483adf..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc deleted file mode 100644 index 4cb32a10..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc deleted file mode 100644 index e640a552..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc deleted file mode 100644 index dc03e54d..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc deleted file mode 100644 index e6963ad1..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc deleted file mode 100644 index 51b64b80..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc deleted file mode 100644 index 3a8e0e98..00000000 Binary files a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/buffered.py b/venv/lib/python3.12/site-packages/anyio/streams/buffered.py deleted file mode 100644 index f5d5e836..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/buffered.py +++ /dev/null @@ -1,119 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from dataclasses import dataclass, field -from typing import Any - -from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead -from ..abc import AnyByteReceiveStream, ByteReceiveStream - - -@dataclass(eq=False) -class BufferedByteReceiveStream(ByteReceiveStream): - """ - Wraps any bytes-based receive stream and uses a buffer to provide sophisticated - receiving capabilities in the form of a byte stream. 
- """ - - receive_stream: AnyByteReceiveStream - _buffer: bytearray = field(init=False, default_factory=bytearray) - _closed: bool = field(init=False, default=False) - - async def aclose(self) -> None: - await self.receive_stream.aclose() - self._closed = True - - @property - def buffer(self) -> bytes: - """The bytes currently in the buffer.""" - return bytes(self._buffer) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.receive_stream.extra_attributes - - async def receive(self, max_bytes: int = 65536) -> bytes: - if self._closed: - raise ClosedResourceError - - if self._buffer: - chunk = bytes(self._buffer[:max_bytes]) - del self._buffer[:max_bytes] - return chunk - elif isinstance(self.receive_stream, ByteReceiveStream): - return await self.receive_stream.receive(max_bytes) - else: - # With a bytes-oriented object stream, we need to handle any surplus bytes - # we get from the receive() call - chunk = await self.receive_stream.receive() - if len(chunk) > max_bytes: - # Save the surplus bytes in the buffer - self._buffer.extend(chunk[max_bytes:]) - return chunk[:max_bytes] - else: - return chunk - - async def receive_exactly(self, nbytes: int) -> bytes: - """ - Read exactly the given amount of bytes from the stream. - - :param nbytes: the number of bytes to read - :return: the bytes read - :raises ~anyio.IncompleteRead: if the stream was closed before the requested - amount of bytes could be read from the stream - - """ - while True: - remaining = nbytes - len(self._buffer) - if remaining <= 0: - retval = self._buffer[:nbytes] - del self._buffer[:nbytes] - return bytes(retval) - - try: - if isinstance(self.receive_stream, ByteReceiveStream): - chunk = await self.receive_stream.receive(remaining) - else: - chunk = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - self._buffer.extend(chunk) - - async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: - """ - Read from the stream until the delimiter is found or max_bytes have been read. 
- - :param delimiter: the marker to look for in the stream - :param max_bytes: maximum number of bytes that will be read before raising - :exc:`~anyio.DelimiterNotFound` - :return: the bytes read (not including the delimiter) - :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter - was found - :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the - bytes read up to the maximum allowed - - """ - delimiter_size = len(delimiter) - offset = 0 - while True: - # Check if the delimiter can be found in the current buffer - index = self._buffer.find(delimiter, offset) - if index >= 0: - found = self._buffer[:index] - del self._buffer[: index + len(delimiter) :] - return bytes(found) - - # Check if the buffer is already at or over the limit - if len(self._buffer) >= max_bytes: - raise DelimiterNotFound(max_bytes) - - # Read more data into the buffer from the socket - try: - data = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - # Move the offset forward and add the new data to the buffer - offset = max(len(self._buffer) - delimiter_size + 1, 0) - self._buffer.extend(data) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/file.py b/venv/lib/python3.12/site-packages/anyio/streams/file.py deleted file mode 100644 index f4924642..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/file.py +++ /dev/null @@ -1,148 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from io import SEEK_SET, UnsupportedOperation -from os import PathLike -from pathlib import Path -from typing import Any, BinaryIO, cast - -from .. import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - TypedAttributeSet, - to_thread, - typed_attribute, -) -from ..abc import ByteReceiveStream, ByteSendStream - - -class FileStreamAttribute(TypedAttributeSet): - #: the open file descriptor - file: BinaryIO = typed_attribute() - #: the path of the file on the file system, if available (file must be a real file) - path: Path = typed_attribute() - #: the file number, if available (file must be a real file or a TTY) - fileno: int = typed_attribute() - - -class _BaseFileStream: - def __init__(self, file: BinaryIO): - self._file = file - - async def aclose(self) -> None: - await to_thread.run_sync(self._file.close) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict[Any, Callable[[], Any]] = { - FileStreamAttribute.file: lambda: self._file, - } - - if hasattr(self._file, "name"): - attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) - - try: - self._file.fileno() - except UnsupportedOperation: - pass - else: - attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() - - return attributes - - -class FileReadStream(_BaseFileStream, ByteReceiveStream): - """ - A byte stream that reads from a file in the file system. - - :param file: a file that has been opened for reading in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: - """ - Create a file read stream by opening the given file. 
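``from_path()`` is a convenience constructor that opens the file in a worker thread; a minimal sketch that streams a (hypothetical) file and counts its bytes::

    import anyio
    from anyio.streams.file import FileReadStream

    async def main() -> None:
        total = 0
        # The path is hypothetical; from_path() opens it for reading in binary mode
        async with await FileReadStream.from_path("/tmp/example.bin") as stream:
            async for chunk in stream:  # ByteReceiveStream supports async iteration
                total += len(chunk)
        print(total, "bytes")

    anyio.run(main)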
- - :param path: path of the file to read from - - """ - file = await to_thread.run_sync(Path(path).open, "rb") - return cls(cast(BinaryIO, file)) - - async def receive(self, max_bytes: int = 65536) -> bytes: - try: - data = await to_thread.run_sync(self._file.read, max_bytes) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc - - if data: - return data - else: - raise EndOfStream - - async def seek(self, position: int, whence: int = SEEK_SET) -> int: - """ - Seek the file to the given position. - - .. seealso:: :meth:`io.IOBase.seek` - - .. note:: Not all file descriptors are seekable. - - :param position: position to seek the file to - :param whence: controls how ``position`` is interpreted - :return: the new absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.seek, position, whence) - - async def tell(self) -> int: - """ - Return the current stream position. - - .. note:: Not all file descriptors are seekable. - - :return: the current absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.tell) - - -class FileWriteStream(_BaseFileStream, ByteSendStream): - """ - A byte stream that writes to a file in the file system. - - :param file: a file that has been opened for writing in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path( - cls, path: str | PathLike[str], append: bool = False - ) -> FileWriteStream: - """ - Create a file write stream by opening the given file for writing. - - :param path: path of the file to write to - :param append: if ``True``, open the file for appending; if ``False``, any - existing file at the given path will be truncated - - """ - mode = "ab" if append else "wb" - file = await to_thread.run_sync(Path(path).open, mode) - return cls(cast(BinaryIO, file)) - - async def send(self, item: bytes) -> None: - try: - await to_thread.run_sync(self._file.write, item) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc diff --git a/venv/lib/python3.12/site-packages/anyio/streams/memory.py b/venv/lib/python3.12/site-packages/anyio/streams/memory.py deleted file mode 100644 index 83bf1d97..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/memory.py +++ /dev/null @@ -1,317 +0,0 @@ -from __future__ import annotations - -import warnings -from collections import OrderedDict, deque -from dataclasses import dataclass, field -from types import TracebackType -from typing import Generic, NamedTuple, TypeVar - -from .. 
import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, -) -from .._core._testing import TaskInfo, get_current_task -from ..abc import Event, ObjectReceiveStream, ObjectSendStream -from ..lowlevel import checkpoint - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class MemoryObjectStreamStatistics(NamedTuple): - current_buffer_used: int #: number of items stored in the buffer - #: maximum number of items that can be stored on this stream (or :data:`math.inf`) - max_buffer_size: float - open_send_streams: int #: number of unclosed clones of the send stream - open_receive_streams: int #: number of unclosed clones of the receive stream - #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` - tasks_waiting_send: int - #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` - tasks_waiting_receive: int - - -@dataclass(eq=False) -class MemoryObjectItemReceiver(Generic[T_Item]): - task_info: TaskInfo = field(init=False, default_factory=get_current_task) - item: T_Item = field(init=False) - - def __repr__(self) -> str: - # When item is not defined, we get following error with default __repr__: - # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' - item = getattr(self, "item", None) - return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" - - -@dataclass(eq=False) -class MemoryObjectStreamState(Generic[T_Item]): - max_buffer_size: float = field() - buffer: deque[T_Item] = field(init=False, default_factory=deque) - open_send_channels: int = field(init=False, default=0) - open_receive_channels: int = field(init=False, default=0) - waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field( - init=False, default_factory=OrderedDict - ) - waiting_senders: OrderedDict[Event, T_Item] = field( - init=False, default_factory=OrderedDict - ) - - def statistics(self) -> MemoryObjectStreamStatistics: - return MemoryObjectStreamStatistics( - len(self.buffer), - self.max_buffer_size, - self.open_send_channels, - self.open_receive_channels, - len(self.waiting_senders), - len(self.waiting_receivers), - ) - - -@dataclass(eq=False) -class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): - _state: MemoryObjectStreamState[T_co] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_receive_channels += 1 - - def receive_nowait(self) -> T_co: - """ - Receive the next item if it can be done without waiting. 
- - :return: the received item - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been - closed from the sending end - :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks - waiting to send - - """ - if self._closed: - raise ClosedResourceError - - if self._state.waiting_senders: - # Get the item from the next sender - send_event, item = self._state.waiting_senders.popitem(last=False) - self._state.buffer.append(item) - send_event.set() - - if self._state.buffer: - return self._state.buffer.popleft() - elif not self._state.open_send_channels: - raise EndOfStream - - raise WouldBlock - - async def receive(self) -> T_co: - await checkpoint() - try: - return self.receive_nowait() - except WouldBlock: - # Add ourselves in the queue - receive_event = Event() - receiver = MemoryObjectItemReceiver[T_co]() - self._state.waiting_receivers[receive_event] = receiver - - try: - await receive_event.wait() - finally: - self._state.waiting_receivers.pop(receive_event, None) - - try: - return receiver.item - except AttributeError: - raise EndOfStream from None - - def clone(self) -> MemoryObjectReceiveStream[T_co]: - """ - Create a clone of this receive stream. - - Each clone can be closed separately. Only when all clones have been closed will - the receiving end of the memory stream be considered closed by the sending ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectReceiveStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special - case for the benefit of synchronous callbacks. - - """ - if not self._closed: - self._closed = True - self._state.open_receive_channels -= 1 - if self._state.open_receive_channels == 0: - send_events = list(self._state.waiting_senders.keys()) - for event in send_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectReceiveStream[T_co]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def __del__(self) -> None: - if not self._closed: - warnings.warn( - f"Unclosed <{self.__class__.__name__} at {id(self):x}>", - ResourceWarning, - source=self, - ) - - -@dataclass(eq=False) -class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): - _state: MemoryObjectStreamState[T_contra] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_send_channels += 1 - - def send_nowait(self, item: T_contra) -> None: - """ - Send an item immediately if it can be done without waiting. 
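``send_nowait()`` and ``receive_nowait()`` back the blocking ``send()``/``receive()`` variants; a minimal sketch of the usual in-memory producer/consumer pairing::

    import anyio

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream(max_buffer_size=10)
        async with send, receive:
            send.send_nowait("hello")        # succeeds while the buffer has room
            print(receive.receive_nowait())  # -> 'hello'
            print(send.statistics())         # MemoryObjectStreamStatistics(...)

    anyio.run(main)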
- - :param item: the item to send - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.BrokenResourceError: if the stream has been closed from the - receiving end - :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting - to receive - - """ - if self._closed: - raise ClosedResourceError - if not self._state.open_receive_channels: - raise BrokenResourceError - - while self._state.waiting_receivers: - receive_event, receiver = self._state.waiting_receivers.popitem(last=False) - if not receiver.task_info.has_pending_cancellation(): - receiver.item = item - receive_event.set() - return - - if len(self._state.buffer) < self._state.max_buffer_size: - self._state.buffer.append(item) - else: - raise WouldBlock - - async def send(self, item: T_contra) -> None: - """ - Send an item to the stream. - - If the buffer is full, this method blocks until there is again room in the - buffer or the item can be sent directly to a receiver. - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.BrokenResourceError: if the stream has been closed from the - receiving end - - """ - await checkpoint() - try: - self.send_nowait(item) - except WouldBlock: - # Wait until there's someone on the receiving end - send_event = Event() - self._state.waiting_senders[send_event] = item - try: - await send_event.wait() - except BaseException: - self._state.waiting_senders.pop(send_event, None) - raise - - if send_event in self._state.waiting_senders: - del self._state.waiting_senders[send_event] - raise BrokenResourceError from None - - def clone(self) -> MemoryObjectSendStream[T_contra]: - """ - Create a clone of this send stream. - - Each clone can be closed separately. Only when all clones have been closed will - the sending end of the memory stream be considered closed by the receiving ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectSendStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special - case for the benefit of synchronous callbacks. - - """ - if not self._closed: - self._closed = True - self._state.open_send_channels -= 1 - if self._state.open_send_channels == 0: - receive_events = list(self._state.waiting_receivers.keys()) - self._state.waiting_receivers.clear() - for event in receive_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. 
versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectSendStream[T_contra]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def __del__(self) -> None: - if not self._closed: - warnings.warn( - f"Unclosed <{self.__class__.__name__} at {id(self):x}>", - ResourceWarning, - source=self, - ) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/stapled.py b/venv/lib/python3.12/site-packages/anyio/streams/stapled.py deleted file mode 100644 index 80f64a2e..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/stapled.py +++ /dev/null @@ -1,141 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping, Sequence -from dataclasses import dataclass -from typing import Any, Generic, TypeVar - -from ..abc import ( - ByteReceiveStream, - ByteSendStream, - ByteStream, - Listener, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, - TaskGroup, -) - -T_Item = TypeVar("T_Item") -T_Stream = TypeVar("T_Stream") - - -@dataclass(eq=False) -class StapledByteStream(ByteStream): - """ - Combines two byte streams into a single, bidirectional byte stream. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param ByteSendStream send_stream: the sending byte stream - :param ByteReceiveStream receive_stream: the receiving byte stream - """ - - send_stream: ByteSendStream - receive_stream: ByteReceiveStream - - async def receive(self, max_bytes: int = 65536) -> bytes: - return await self.receive_stream.receive(max_bytes) - - async def send(self, item: bytes) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): - """ - Combines two object streams into a single, bidirectional object stream. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param ObjectSendStream send_stream: the sending object stream - :param ObjectReceiveStream receive_stream: the receiving object stream - """ - - send_stream: ObjectSendStream[T_Item] - receive_stream: ObjectReceiveStream[T_Item] - - async def receive(self) -> T_Item: - return await self.receive_stream.receive() - - async def send(self, item: T_Item) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class MultiListener(Generic[T_Stream], Listener[T_Stream]): - """ - Combines multiple listeners into one, serving connections from all of them at once. - - Any MultiListeners in the given collection of listeners will have their listeners - moved into this one. 
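As a hedged illustration of the stapled classes above, the two halves of a single memory object stream can be stapled into a loopback object stream; the names and the "ping" payload are illustrative:

```python
import anyio
from anyio.streams.stapled import StapledObjectStream

async def main() -> None:
    send, receive = anyio.create_memory_object_stream(1)
    # Stapling the two halves of one pair yields a loopback stream:
    # whatever is sent can be received back from the same object.
    loopback = StapledObjectStream(send, receive)
    await loopback.send("ping")
    print(await loopback.receive())  # 'ping'
    await loopback.aclose()

anyio.run(main)
```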
- - Extra attributes are provided from each listener, with each successive listener - overriding any conflicting attributes from the previous one. - - :param listeners: listeners to serve - :type listeners: Sequence[Listener[T_Stream]] - """ - - listeners: Sequence[Listener[T_Stream]] - - def __post_init__(self) -> None: - listeners: list[Listener[T_Stream]] = [] - for listener in self.listeners: - if isinstance(listener, MultiListener): - listeners.extend(listener.listeners) - del listener.listeners[:] # type: ignore[attr-defined] - else: - listeners.append(listener) - - self.listeners = listeners - - async def serve( - self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None - ) -> None: - from .. import create_task_group - - async with create_task_group() as tg: - for listener in self.listeners: - tg.start_soon(listener.serve, handler, task_group) - - async def aclose(self) -> None: - for listener in self.listeners: - await listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict = {} - for listener in self.listeners: - attributes.update(listener.extra_attributes) - - return attributes diff --git a/venv/lib/python3.12/site-packages/anyio/streams/text.py b/venv/lib/python3.12/site-packages/anyio/streams/text.py deleted file mode 100644 index f1a11278..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/text.py +++ /dev/null @@ -1,147 +0,0 @@ -from __future__ import annotations - -import codecs -from collections.abc import Callable, Mapping -from dataclasses import InitVar, dataclass, field -from typing import Any - -from ..abc import ( - AnyByteReceiveStream, - AnyByteSendStream, - AnyByteStream, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, -) - - -@dataclass(eq=False) -class TextReceiveStream(ObjectReceiveStream[str]): - """ - Stream wrapper that decodes bytes to strings using the given encoding. - - Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any - completely received unicode characters as soon as they come in. - - :param transport_stream: any bytes-based receive stream - :param encoding: character encoding to use for decoding bytes to strings (defaults - to ``utf-8``) - :param errors: handling scheme for decoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteReceiveStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _decoder: codecs.IncrementalDecoder = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - decoder_class = codecs.getincrementaldecoder(encoding) - self._decoder = decoder_class(errors=errors) - - async def receive(self) -> str: - while True: - chunk = await self.transport_stream.receive() - decoded = self._decoder.decode(chunk) - if decoded: - return decoded - - async def aclose(self) -> None: - await self.transport_stream.aclose() - self._decoder.reset() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextSendStream(ObjectSendStream[str]): - """ - Sends strings to the wrapped stream as bytes using the given encoding. 
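A small sketch of `TextReceiveStream` above, using a memory object stream of `bytes` as the transport (any byte-oriented receive stream would do); the sample string is arbitrary:

```python
import anyio
from anyio.streams.text import TextReceiveStream

async def main() -> None:
    send, receive = anyio.create_memory_object_stream(1)
    text = TextReceiveStream(receive)  # defaults: encoding='utf-8', errors='strict'
    await send.send("héllo".encode("utf-8"))
    print(await text.receive())  # 'héllo'

anyio.run(main)
```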
- - :param AnyByteSendStream transport_stream: any bytes-based send stream - :param str encoding: character encoding to use for encoding strings to bytes - (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see - the `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteSendStream - encoding: InitVar[str] = "utf-8" - errors: str = "strict" - _encoder: Callable[..., tuple[bytes, int]] = field(init=False) - - def __post_init__(self, encoding: str) -> None: - self._encoder = codecs.getencoder(encoding) - - async def send(self, item: str) -> None: - encoded = self._encoder(item, self.errors)[0] - await self.transport_stream.send(encoded) - - async def aclose(self) -> None: - await self.transport_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextStream(ObjectStream[str]): - """ - A bidirectional stream that decodes bytes to strings on receive and encodes strings - to bytes on send. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param AnyByteStream transport_stream: any bytes-based stream - :param str encoding: character encoding to use for encoding/decoding strings to/from - bytes (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see - the `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _receive_stream: TextReceiveStream = field(init=False) - _send_stream: TextSendStream = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - self._receive_stream = TextReceiveStream( - self.transport_stream, encoding=encoding, errors=errors - ) - self._send_stream = TextSendStream( - self.transport_stream, encoding=encoding, errors=errors - ) - - async def receive(self) -> str: - return await self._receive_stream.receive() - - async def send(self, item: str) -> None: - await self._send_stream.send(item) - - async def send_eof(self) -> None: - await self.transport_stream.send_eof() - - async def aclose(self) -> None: - await self._send_stream.aclose() - await self._receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self._send_stream.extra_attributes, - **self._receive_stream.extra_attributes, - } diff --git a/venv/lib/python3.12/site-packages/anyio/streams/tls.py b/venv/lib/python3.12/site-packages/anyio/streams/tls.py deleted file mode 100644 index 70a41cc7..00000000 --- a/venv/lib/python3.12/site-packages/anyio/streams/tls.py +++ /dev/null @@ -1,352 +0,0 @@ -from __future__ import annotations - -import logging -import re -import ssl -import sys -from collections.abc import Callable, Mapping -from dataclasses import dataclass -from functools import wraps -from typing import Any, TypeVar - -from .. 
import ( - BrokenResourceError, - EndOfStream, - aclose_forcefully, - get_cancelled_exc_class, - to_thread, -) -from .._core._typedattr import TypedAttributeSet, typed_attribute -from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") -_PCTRTT = tuple[tuple[str, str], ...] -_PCTRTTT = tuple[_PCTRTT, ...] - - -class TLSAttribute(TypedAttributeSet): - """Contains Transport Layer Security related attributes.""" - - #: the selected ALPN protocol - alpn_protocol: str | None = typed_attribute() - #: the channel binding for type ``tls-unique`` - channel_binding_tls_unique: bytes = typed_attribute() - #: the selected cipher - cipher: tuple[str, str, int] = typed_attribute() - #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` - # for more information) - peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() - #: the peer certificate in binary form - peer_certificate_binary: bytes | None = typed_attribute() - #: ``True`` if this is the server side of the connection - server_side: bool = typed_attribute() - #: ciphers shared by the client during the TLS handshake (``None`` if this is the - #: client side) - shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() - #: the :class:`~ssl.SSLObject` used for encryption - ssl_object: ssl.SSLObject = typed_attribute() - #: ``True`` if this stream does (and expects) a closing TLS handshake when the - #: stream is being closed - standard_compatible: bool = typed_attribute() - #: the TLS protocol version (e.g. ``TLSv1.2``) - tls_version: str = typed_attribute() - - -@dataclass(eq=False) -class TLSStream(ByteStream): - """ - A stream wrapper that encrypts all sent data and decrypts received data. - - This class has no public initializer; use :meth:`wrap` instead. - All extra attributes from :class:`~TLSAttribute` are supported. - - :var AnyByteStream transport_stream: the wrapped stream - - """ - - transport_stream: AnyByteStream - standard_compatible: bool - _ssl_object: ssl.SSLObject - _read_bio: ssl.MemoryBIO - _write_bio: ssl.MemoryBIO - - @classmethod - async def wrap( - cls, - transport_stream: AnyByteStream, - *, - server_side: bool | None = None, - hostname: str | None = None, - ssl_context: ssl.SSLContext | None = None, - standard_compatible: bool = True, - ) -> TLSStream: - """ - Wrap an existing stream with Transport Layer Security. - - This performs a TLS handshake with the peer. - - :param transport_stream: a bytes-transporting stream to wrap - :param server_side: ``True`` if this is the server side of the connection, - ``False`` if this is the client side (if omitted, will be set to ``False`` - if ``hostname`` has been provided, ``True`` otherwise). Used only to create - a default context when an explicit context has not been provided.
- :param hostname: host name of the peer (if host name checking is desired) - :param ssl_context: the SSLContext object to use (if not provided, a secure - default will be created) - :param standard_compatible: if ``False``, skip the closing handshake when - closing the connection, and don't raise an exception if the peer does the - same - :raises ~ssl.SSLError: if the TLS handshake fails - - """ - if server_side is None: - server_side = not hostname - - if not ssl_context: - purpose = ( - ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH - ) - ssl_context = ssl.create_default_context(purpose) - - # Re-enable detection of unexpected EOFs if it was disabled by Python - if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): - ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF - - bio_in = ssl.MemoryBIO() - bio_out = ssl.MemoryBIO() - - # External SSLContext implementations may do blocking I/O in wrap_bio(), - # but the standard library implementation won't - if type(ssl_context) is ssl.SSLContext: - ssl_object = ssl_context.wrap_bio( - bio_in, bio_out, server_side=server_side, server_hostname=hostname - ) - else: - ssl_object = await to_thread.run_sync( - ssl_context.wrap_bio, - bio_in, - bio_out, - server_side, - hostname, - None, - ) - - wrapper = cls( - transport_stream=transport_stream, - standard_compatible=standard_compatible, - _ssl_object=ssl_object, - _read_bio=bio_in, - _write_bio=bio_out, - ) - await wrapper._call_sslobject_method(ssl_object.do_handshake) - return wrapper - - async def _call_sslobject_method( - self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] - ) -> T_Retval: - while True: - try: - result = func(*args) - except ssl.SSLWantReadError: - try: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - data = await self.transport_stream.receive() - except EndOfStream: - self._read_bio.write_eof() - except OSError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - else: - self._read_bio.write(data) - except ssl.SSLWantWriteError: - await self.transport_stream.send(self._write_bio.read()) - except ssl.SSLSyscallError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - except ssl.SSLError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - if isinstance(exc, ssl.SSLEOFError) or ( - exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror - ): - if self.standard_compatible: - raise BrokenResourceError from exc - else: - raise EndOfStream from None - - raise - else: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - return result - - async def unwrap(self) -> tuple[AnyByteStream, bytes]: - """ - Does the TLS closing handshake. 
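A client-side usage sketch for `wrap()` above; `anyio.connect_tcp()` and the host name are assumptions for illustration, not part of this module:

```python
import anyio
from anyio.streams.tls import TLSStream

async def main() -> None:
    tcp = await anyio.connect_tcp("example.com", 443)
    # Providing hostname implies server_side=False and enables host name checking.
    tls = await TLSStream.wrap(tcp, hostname="example.com")
    await tls.send(b"HEAD / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
    print(await tls.receive())
    await tls.aclose()

anyio.run(main)
```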
- - :return: a tuple of (wrapped byte stream, bytes left in the read buffer) - - """ - await self._call_sslobject_method(self._ssl_object.unwrap) - self._read_bio.write_eof() - self._write_bio.write_eof() - return self.transport_stream, self._read_bio.read() - - async def aclose(self) -> None: - if self.standard_compatible: - try: - await self.unwrap() - except BaseException: - await aclose_forcefully(self.transport_stream) - raise - - await self.transport_stream.aclose() - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - await self._call_sslobject_method(self._ssl_object.write, item) - - async def send_eof(self) -> None: - tls_version = self.extra(TLSAttribute.tls_version) - match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) - if match: - major, minor = int(match.group(1)), int(match.group(2) or 0) - if (major, minor) < (1, 3): - raise NotImplementedError( - f"send_eof() requires at least TLSv1.3; current " - f"session uses {tls_version}" - ) - - raise NotImplementedError( - "send_eof() has not yet been implemented for TLS streams" - ) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.transport_stream.extra_attributes, - TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, - TLSAttribute.channel_binding_tls_unique: ( - self._ssl_object.get_channel_binding - ), - TLSAttribute.cipher: self._ssl_object.cipher, - TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), - TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( - True - ), - TLSAttribute.server_side: lambda: self._ssl_object.server_side, - TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() - if self._ssl_object.server_side - else None, - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - TLSAttribute.ssl_object: lambda: self._ssl_object, - TLSAttribute.tls_version: self._ssl_object.version, - } - - -@dataclass(eq=False) -class TLSListener(Listener[TLSStream]): - """ - A convenience listener that wraps another listener and auto-negotiates a TLS session - on every accepted connection. - - If the TLS handshake times out or raises an exception, - :meth:`handle_handshake_error` is called to do whatever post-mortem processing is - deemed necessary. - - Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. - - :param Listener listener: the listener to wrap - :param ssl_context: the SSL context object - :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` - :param handshake_timeout: time limit for the TLS handshake - (passed to :func:`~anyio.fail_after`) - """ - - listener: Listener[Any] - ssl_context: ssl.SSLContext - standard_compatible: bool = True - handshake_timeout: float = 30 - - @staticmethod - async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: - """ - Handle an exception raised during the TLS handshake. - - This method does 3 things: - - #. Forcefully closes the original stream - #. Logs the exception (unless it was a cancellation exception) using the - ``anyio.streams.tls`` logger - #. 
Reraises the exception if it was a base exception or a cancellation exception - - :param exc: the exception - :param stream: the original stream - - """ - await aclose_forcefully(stream) - - # Log all except cancellation exceptions - if not isinstance(exc, get_cancelled_exc_class()): - # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using - # any asyncio implementation, so we explicitly pass the exception to log - # (https://github.com/python/cpython/issues/108668). Trio does not have this - # issue because it works around the CPython bug. - logging.getLogger(__name__).exception( - "Error during TLS handshake", exc_info=exc - ) - - # Only reraise base exceptions and cancellation exceptions - if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): - raise - - async def serve( - self, - handler: Callable[[TLSStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - @wraps(handler) - async def handler_wrapper(stream: AnyByteStream) -> None: - from .. import fail_after - - try: - with fail_after(self.handshake_timeout): - wrapped_stream = await TLSStream.wrap( - stream, - ssl_context=self.ssl_context, - standard_compatible=self.standard_compatible, - ) - except BaseException as exc: - await self.handle_handshake_error(exc, stream) - else: - await handler(wrapped_stream) - - await self.listener.serve(handler_wrapper, task_group) - - async def aclose(self) -> None: - await self.listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - } diff --git a/venv/lib/python3.12/site-packages/anyio/to_interpreter.py b/venv/lib/python3.12/site-packages/anyio/to_interpreter.py deleted file mode 100644 index 8a2e993a..00000000 --- a/venv/lib/python3.12/site-packages/anyio/to_interpreter.py +++ /dev/null @@ -1,218 +0,0 @@ -from __future__ import annotations - -import atexit -import os -import pickle -import sys -from collections import deque -from collections.abc import Callable -from textwrap import dedent -from typing import Any, Final, TypeVar - -from . 
import current_time, to_thread -from ._core._exceptions import BrokenWorkerIntepreter -from ._core._synchronization import CapacityLimiter -from .lowlevel import RunVar - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib -FMT_UNPICKLED: Final = 0 -FMT_PICKLED: Final = 1 -DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value -MAX_WORKER_IDLE_TIME = ( - 30 # seconds a subinterpreter can be idle before becoming eligible for pruning -) - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - -_idle_workers = RunVar[deque["Worker"]]("_available_workers") -_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter") - - -class Worker: - _run_func = compile( - dedent(""" - import _interpqueues as queues - import _interpreters as interpreters - from pickle import loads, dumps, HIGHEST_PROTOCOL - - item = queues.get(queue_id)[0] - try: - func, args = loads(item) - retval = func(*args) - except BaseException as exc: - is_exception = True - retval = exc - else: - is_exception = False - - try: - queues.put(queue_id, (retval, is_exception), FMT_UNPICKLED, UNBOUND) - except interpreters.NotShareableError: - retval = dumps(retval, HIGHEST_PROTOCOL) - queues.put(queue_id, (retval, is_exception), FMT_PICKLED, UNBOUND) - """), - "", - "exec", - ) - - last_used: float = 0 - - _initialized: bool = False - _interpreter_id: int - _queue_id: int - - def initialize(self) -> None: - import _interpqueues as queues - import _interpreters as interpreters - - self._interpreter_id = interpreters.create() - self._queue_id = queues.create(2, FMT_UNPICKLED, UNBOUND) - self._initialized = True - interpreters.set___main___attrs( - self._interpreter_id, - { - "queue_id": self._queue_id, - "FMT_PICKLED": FMT_PICKLED, - "FMT_UNPICKLED": FMT_UNPICKLED, - "UNBOUND": UNBOUND, - }, - ) - - def destroy(self) -> None: - import _interpqueues as queues - import _interpreters as interpreters - - if self._initialized: - interpreters.destroy(self._interpreter_id) - queues.destroy(self._queue_id) - - def _call( - self, - func: Callable[..., T_Retval], - args: tuple[Any], - ) -> tuple[Any, bool]: - import _interpqueues as queues - import _interpreters as interpreters - - if not self._initialized: - self.initialize() - - payload = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL) - queues.put(self._queue_id, payload, FMT_PICKLED, UNBOUND) - - res: Any - is_exception: bool - if exc_info := interpreters.exec(self._interpreter_id, self._run_func): - raise BrokenWorkerIntepreter(exc_info) - - (res, is_exception), fmt = queues.get(self._queue_id)[:2] - if fmt == FMT_PICKLED: - res = pickle.loads(res) - - return res, is_exception - - async def call( - self, - func: Callable[..., T_Retval], - args: tuple[Any], - limiter: CapacityLimiter, - ) -> T_Retval: - result, is_exception = await to_thread.run_sync( - self._call, - func, - args, - limiter=limiter, - ) - if is_exception: - raise result - - return result - - -def _stop_workers(workers: deque[Worker]) -> None: - for worker in workers: - worker.destroy() - - workers.clear() - - -async def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a subinterpreter. 
- - If the task waiting for its completion is cancelled, the call will still run its - course, but its return value (or any raised exception) will be ignored. - - .. warning:: This feature is **experimental**. The upstream interpreter API has not - yet been finalized or thoroughly tested, so don't rely on this for anything - mission critical. - - :param func: a callable - :param args: positional arguments for the callable - :param limiter: capacity limiter to use to limit the total number of subinterpreters - running (if omitted, the default limiter is used) - :return: the result of the call - :raises BrokenWorkerIntepreter: if there's an internal error in a subinterpreter - - """ - if sys.version_info <= (3, 13): - raise RuntimeError("subinterpreters require at least Python 3.13") - - if limiter is None: - limiter = current_default_interpreter_limiter() - - try: - idle_workers = _idle_workers.get() - except LookupError: - idle_workers = deque() - _idle_workers.set(idle_workers) - atexit.register(_stop_workers, idle_workers) - - async with limiter: - try: - worker = idle_workers.pop() - except IndexError: - worker = Worker() - - try: - return await worker.call(func, args, limiter) - finally: - # Prune workers that have been idle for too long - now = current_time() - while idle_workers: - if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME: - break - - await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter) - - worker.last_used = current_time() - idle_workers.append(worker) - - -def current_default_interpreter_limiter() -> CapacityLimiter: - """ - Return the capacity limiter that is used by default to limit the number of - concurrently running subinterpreters. - - Defaults to the number of CPU cores.
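A hedged sketch of calling the subinterpreter `run_sync()` above (exposed as `anyio.to_interpreter` in recent AnyIO releases); requires Python 3.13+, and the workload function is illustrative:

```python
import sys
import anyio
from anyio import to_interpreter

def crunch(n: int) -> int:
    return sum(i * i for i in range(n))  # CPU-bound work, no shared state

async def main() -> None:
    # Dispatched to a pooled subinterpreter through a worker thread.
    print(await to_interpreter.run_sync(crunch, 1_000_000))

if sys.version_info >= (3, 13):  # run_sync() raises RuntimeError on older versions
    anyio.run(main)
```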
- - :return: a capacity limiter object - - """ - try: - return _default_interpreter_limiter.get() - except LookupError: - limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT) - _default_interpreter_limiter.set(limiter) - return limiter diff --git a/venv/lib/python3.12/site-packages/anyio/to_process.py b/venv/lib/python3.12/site-packages/anyio/to_process.py deleted file mode 100644 index 495de2ae..00000000 --- a/venv/lib/python3.12/site-packages/anyio/to_process.py +++ /dev/null @@ -1,258 +0,0 @@ -from __future__ import annotations - -import os -import pickle -import subprocess -import sys -from collections import deque -from collections.abc import Callable -from importlib.util import module_from_spec, spec_from_file_location -from typing import TypeVar, cast - -from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class -from ._core._exceptions import BrokenWorkerProcess -from ._core._subprocesses import open_process -from ._core._synchronization import CapacityLimiter -from ._core._tasks import CancelScope, fail_after -from .abc import ByteReceiveStream, ByteSendStream, Process -from .lowlevel import RunVar, checkpoint_if_cancelled -from .streams.buffered import BufferedByteReceiveStream - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -WORKER_MAX_IDLE_TIME = 300 # 5 minutes - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - -_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") -_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( - "_process_pool_idle_workers" -) -_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") - - -async def run_sync( # type: ignore[return] - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a worker process. - - If the ``cancellable`` option is enabled and the task waiting for its completion is - cancelled, the worker process running it will be abruptly terminated using SIGKILL - (or ``terminateProcess()`` on Windows). - - :param func: a callable - :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation while it's - running - :param limiter: capacity limiter to use to limit the total amount of processes - running (if omitted, the default limiter is used) - :return: an awaitable that yields the return value of the function. 
- - """ - - async def send_raw_command(pickled_cmd: bytes) -> object: - try: - await stdin.send(pickled_cmd) - response = await buffered.receive_until(b"\n", 50) - status, length = response.split(b" ") - if status not in (b"RETURN", b"EXCEPTION"): - raise RuntimeError( - f"Worker process returned unexpected response: {response!r}" - ) - - pickled_response = await buffered.receive_exactly(int(length)) - except BaseException as exc: - workers.discard(process) - try: - process.kill() - with CancelScope(shield=True): - await process.aclose() - except ProcessLookupError: - pass - - if isinstance(exc, get_cancelled_exc_class()): - raise - else: - raise BrokenWorkerProcess from exc - - retval = pickle.loads(pickled_response) - if status == b"EXCEPTION": - assert isinstance(retval, BaseException) - raise retval - else: - return retval - - # First pickle the request before trying to reserve a worker process - await checkpoint_if_cancelled() - request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) - - # If this is the first run in this event loop thread, set up the necessary variables - try: - workers = _process_pool_workers.get() - idle_workers = _process_pool_idle_workers.get() - except LookupError: - workers = set() - idle_workers = deque() - _process_pool_workers.set(workers) - _process_pool_idle_workers.set(idle_workers) - get_async_backend().setup_process_pool_exit_at_shutdown(workers) - - async with limiter or current_default_process_limiter(): - # Pop processes from the pool (starting from the most recently used) until we - # find one that hasn't exited yet - process: Process - while idle_workers: - process, idle_since = idle_workers.pop() - if process.returncode is None: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - - # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME - # seconds or longer - now = current_time() - killed_processes: list[Process] = [] - while idle_workers: - if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: - break - - process_to_kill, idle_since = idle_workers.popleft() - process_to_kill.kill() - workers.remove(process_to_kill) - killed_processes.append(process_to_kill) - - with CancelScope(shield=True): - for killed_process in killed_processes: - await killed_process.aclose() - - break - - workers.remove(process) - else: - command = [sys.executable, "-u", "-m", __name__] - process = await open_process( - command, stdin=subprocess.PIPE, stdout=subprocess.PIPE - ) - try: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - with fail_after(20): - message = await buffered.receive(6) - - if message != b"READY\n": - raise BrokenWorkerProcess( - f"Worker process returned unexpected response: {message!r}" - ) - - main_module_path = getattr(sys.modules["__main__"], "__file__", None) - pickled = pickle.dumps( - ("init", sys.path, main_module_path), - protocol=pickle.HIGHEST_PROTOCOL, - ) - await send_raw_command(pickled) - except (BrokenWorkerProcess, get_cancelled_exc_class()): - raise - except BaseException as exc: - process.kill() - raise BrokenWorkerProcess( - "Error during worker process initialization" - ) from exc - - workers.add(process) - - with CancelScope(shield=not cancellable): - try: - return cast(T_Retval, await send_raw_command(request)) - finally: - if process in workers: - idle_workers.append((process, current_time())) - - -def 
current_default_process_limiter() -> CapacityLimiter: - """ - Return the capacity limiter that is used by default to limit the number of worker - processes. - - :return: a capacity limiter object - - """ - try: - return _default_process_limiter.get() - except LookupError: - limiter = CapacityLimiter(os.cpu_count() or 2) - _default_process_limiter.set(limiter) - return limiter - - -def process_worker() -> None: - # Redirect standard streams to os.devnull so that user code won't interfere with the - # parent-worker communication - stdin = sys.stdin - stdout = sys.stdout - sys.stdin = open(os.devnull) - sys.stdout = open(os.devnull, "w") - - stdout.buffer.write(b"READY\n") - while True: - retval = exception = None - try: - command, *args = pickle.load(stdin.buffer) - except EOFError: - return - except BaseException as exc: - exception = exc - else: - if command == "run": - func, args = args - try: - retval = func(*args) - except BaseException as exc: - exception = exc - elif command == "init": - main_module_path: str | None - sys.path, main_module_path = args - del sys.modules["__main__"] - if main_module_path and os.path.isfile(main_module_path): - # Load the parent's main module but as __mp_main__ instead of - # __main__ (like multiprocessing does) to avoid infinite recursion - try: - spec = spec_from_file_location("__mp_main__", main_module_path) - if spec and spec.loader: - main = module_from_spec(spec) - spec.loader.exec_module(main) - sys.modules["__main__"] = main - except BaseException as exc: - exception = exc - try: - if exception is not None: - status = b"EXCEPTION" - pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) - else: - status = b"RETURN" - pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) - except BaseException as exc: - exception = exc - status = b"EXCEPTION" - pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) - - stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) - stdout.buffer.write(pickled) - - # Respect SIGTERM - if isinstance(exception, SystemExit): - raise exception - - -if __name__ == "__main__": - process_worker() diff --git a/venv/lib/python3.12/site-packages/anyio/to_thread.py b/venv/lib/python3.12/site-packages/anyio/to_thread.py deleted file mode 100644 index 5070516e..00000000 --- a/venv/lib/python3.12/site-packages/anyio/to_thread.py +++ /dev/null @@ -1,69 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Callable -from typing import TypeVar -from warnings import warn - -from ._core._eventloop import get_async_backend -from .abc import CapacityLimiter - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - - -async def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - abandon_on_cancel: bool = False, - cancellable: bool | None = None, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a worker thread. - - If the ``cancellable`` option is enabled and the task waiting for its completion is - cancelled, the thread will still run its course but its return value (or any raised - exception) will be ignored. 
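Finally, a minimal sketch of the thread-pool `run_sync()` above; the blocking function and file path are placeholders:

```python
import anyio
from anyio import to_thread

def read_file(path: str) -> bytes:
    with open(path, "rb") as f:  # blocking I/O, kept off the event loop
        return f.read()

async def main() -> None:
    data = await to_thread.run_sync(read_file, "/etc/hostname")
    print(len(data))

anyio.run(main)
```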
- - :param func: a callable - :param args: positional arguments for the callable - :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run - unchecked on own) if the host task is cancelled, ``False`` to ignore - cancellations in the host task until the operation has completed in the worker - thread - :param cancellable: deprecated alias of ``abandon_on_cancel``; will override - ``abandon_on_cancel`` if both parameters are passed - :param limiter: capacity limiter to use to limit the total amount of threads running - (if omitted, the default limiter is used) - :return: an awaitable that yields the return value of the function. - - """ - if cancellable is not None: - abandon_on_cancel = cancellable - warn( - "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " - "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", - DeprecationWarning, - stacklevel=2, - ) - - return await get_async_backend().run_sync_in_worker_thread( - func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter - ) - - -def current_default_thread_limiter() -> CapacityLimiter: - """ - Return the capacity limiter that is used by default to limit the number of - concurrent threads. - - :return: a capacity limiter object - - """ - return get_async_backend().current_default_thread_limiter() diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/METADATA deleted file mode 100644 index 96973ac9..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/METADATA +++ /dev/null @@ -1,203 +0,0 @@ -Metadata-Version: 2.4 -Name: beanie -Version: 2.0.0 -Summary: Asynchronous Python ODM for MongoDB -Keywords: mongodb,odm,orm,pydantic,mongo,async,python -Author-email: Roman Right -Requires-Python: >=3.9,<4.0 -Description-Content-Type: text/markdown -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Topic :: Database -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -License-File: LICENSE -Requires-Dist: pydantic>=1.10.18,<3.0 -Requires-Dist: click>=7 -Requires-Dist: lazy-model==0.3.0 -Requires-Dist: pymongo>=4.11.0,<5.0.0 -Requires-Dist: typing-extensions>=4.7 -Requires-Dist: pymongo[aws]>=4.11.0,<5.0.0 ; extra == "aws" -Requires-Dist: tomli>=2.2.1,<3.0.0 ; extra == "ci" and ( python_version < '3.11') -Requires-Dist: tomli-w>=1.0.0,<2.0.0 ; extra == "ci" -Requires-Dist: requests ; extra == "ci" -Requires-Dist: types-requests ; extra == "ci" -Requires-Dist: Pygments>=2.8.0 ; extra == "doc" -Requires-Dist: Markdown>=3.3 ; extra == "doc" -Requires-Dist: pydoc-markdown>=4.8 ; extra == "doc" -Requires-Dist: mkdocs>=1.4 ; extra == "doc" -Requires-Dist: mkdocs-material>=9.0 ; extra == "doc" -Requires-Dist: jinja2>=3.0.3 ; extra == "doc" -Requires-Dist: pymongo[encryption]>=4.11.0,<5.0.0 ; extra == "encryption" -Requires-Dist: pymongo[gssapi]>=4.11.0,<5.0.0 ; extra == "gssapi" -Requires-Dist: pymongo[ocsp]>=4.11.0,<5.0.0 ; extra == "ocsp" -Requires-Dist: 
beanie-batteries-queue>=0.2 ; extra == "queue" -Requires-Dist: pymongo[snappy]>=4.11.0,<5.0.0 ; extra == "snappy" -Requires-Dist: pre-commit>=3.5.0 ; extra == "test" -Requires-Dist: pytest>=8.3.3 ; extra == "test" -Requires-Dist: pytest-asyncio>=0.24.0 ; extra == "test" -Requires-Dist: pytest-cov>=5.0.0 ; extra == "test" -Requires-Dist: dnspython>=2.1.0 ; extra == "test" -Requires-Dist: pyright>=0 ; extra == "test" -Requires-Dist: asgi-lifespan>=1.0.1 ; extra == "test" -Requires-Dist: httpx>=0.23.0 ; extra == "test" -Requires-Dist: fastapi>=0.100 ; extra == "test" -Requires-Dist: pydantic-settings>=2 ; extra == "test" -Requires-Dist: pydantic-extra-types>=2 ; extra == "test" -Requires-Dist: pydantic[email] ; extra == "test" -Requires-Dist: pymongo[zstd]>=4.11.0,<5.0.0 ; extra == "zstd" -Project-URL: homepage, https://beanie-odm.dev -Project-URL: repository, https://github.com/roman-right/beanie -Provides-Extra: aws -Provides-Extra: ci -Provides-Extra: doc -Provides-Extra: encryption -Provides-Extra: gssapi -Provides-Extra: ocsp -Provides-Extra: queue -Provides-Extra: snappy -Provides-Extra: test -Provides-Extra: zstd - -[![Beanie](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/white_bg.svg)](https://github.com/roman-right/beanie) - -[![shields badge](https://shields.io/badge/-docs-blue)](https://beanie-odm.dev) -[![pypi](https://img.shields.io/pypi/v/beanie.svg)](https://pypi.python.org/pypi/beanie) -[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/BeanieODM/beanie/main.svg)](https://results.pre-commit.ci/latest/github/BeanieODM/beanie/main) - - - -## 📢 Important Update 📢 - -We are excited to announce that Beanie is transitioning from solo development to a team-based approach! This move will help us enhance the project with new features and more collaborative development. - -At this moment we are establishing a board of members that will decide all the future steps of the project. We are looking for contributors and maintainers to join the board. - -### Join Us -If you are interested in contributing or want to stay updated, please join our Discord channel. We're looking forward to your ideas and contributions! - -[Join our Discord](https://discord.gg/AwwTrbCASP) - -Let’s make Beanie better, together! - -## Overview - -[Beanie](https://github.com/roman-right/beanie) - is an asynchronous Python object-document mapper (ODM) for MongoDB. Data models are based on [Pydantic](https://pydantic-docs.helpmanual.io/). - -When using Beanie each database collection has a corresponding `Document` that -is used to interact with that collection. In addition to retrieving data, -Beanie allows you to add, update, or delete documents from the collection as -well. - -Beanie saves you time by removing boilerplate code, and it helps you focus on -the parts of your app that actually matter. - -Data and schema migrations are supported by Beanie out of the box. - -There is a synchronous version of Beanie ODM - [Bunnet](https://github.com/roman-right/bunnet) - -## Installation - -### PIP - -```shell -pip install beanie -``` - -### Poetry - -```shell -poetry add beanie -``` - -For more installation options (eg: `aws`, `gcp`, `srv` ...) 
you can look in the [getting started](./docs/getting-started.md#optional-dependencies) - -## Example - -```python -import asyncio -from typing import Optional - -from pymongo import AsyncMongoClient -from pydantic import BaseModel - -from beanie import Document, Indexed, init_beanie - - -class Category(BaseModel): - name: str - description: str - - -class Product(Document): - name: str # You can use normal types just like in pydantic - description: Optional[str] = None - price: Indexed(float) # You can also specify that a field should correspond to an index - category: Category # You can include pydantic models as well - - -# This is an asynchronous example, so we will access it from an async function -async def example(): - # Beanie uses PyMongo async client under the hood - client = AsyncMongoClient("mongodb://user:pass@host:27017") - - # Initialize beanie with the Product document class - await init_beanie(database=client.db_name, document_models=[Product]) - - chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.") - # Beanie documents work just like pydantic models - tonybar = Product(name="Tony's", price=5.95, category=chocolate) - # And can be inserted into the database - await tonybar.insert() - - # You can find documents with pythonic syntax - product = await Product.find_one(Product.price < 10) - - # And update them - await product.set({Product.name:"Gold bar"}) - - -if __name__ == "__main__": - asyncio.run(example()) -``` - -## Links - -### Documentation - -- **[Doc](https://beanie-odm.dev/)** - Tutorial, API documentation, and development guidelines. - -### Example Projects - -- **[fastapi-cosmos-beanie](https://github.com/tonybaloney/ants-azure-demos/tree/master/fastapi-cosmos-beanie)** - FastAPI + Beanie ODM + Azure Cosmos Demo Application by [Anthony Shaw](https://github.com/tonybaloney) -- **[fastapi-beanie-jwt](https://github.com/flyinactor91/fastapi-beanie-jwt)** - - Sample FastAPI server with JWT auth and Beanie ODM by [Michael duPont](https://github.com/flyinactor91) -- **[Shortify](https://github.com/IHosseini083/Shortify)** - URL shortener RESTful API (FastAPI + Beanie ODM + JWT & OAuth2) by [ -Iliya Hosseini](https://github.com/IHosseini083) -- **[LCCN Predictor](https://github.com/baoliay2008/lccn_predictor)** - Leetcode contest rating predictor (FastAPI + Beanie ODM + React) by [L. Bao](https://github.com/baoliay2008) - -### Articles - -- **[Announcing Beanie - MongoDB ODM](https://dev.to/romanright/announcing-beanie-mongodb-odm-56e)** -- **[Build a Cocktail API with Beanie and MongoDB](https://developer.mongodb.com/article/beanie-odm-fastapi-cocktails/)** -- **[MongoDB indexes with Beanie](https://dev.to/romanright/mongodb-indexes-with-beanie-43e8)** -- **[Beanie Projections. 
Reducing network and database load.](https://dev.to/romanright/beanie-projections-reducing-network-and-database-load-3bih)** -- **[Beanie 1.0 - Query Builder](https://dev.to/romanright/announcing-beanie-1-0-mongodb-odm-with-query-builder-4mbl)** -- **[Beanie 1.8 - Relations, Cache, Actions and more!](https://dev.to/romanright/announcing-beanie-odm-18-relations-cache-actions-and-more-24ef)** - -### Resources - -- **[GitHub](https://github.com/roman-right/beanie)** - GitHub page of the - project -- **[Changelog](https://beanie-odm.dev/changelog)** - list of all - the valuable changes -- **[Discord](https://discord.gg/AwwTrbCASP)** - ask your questions, share - ideas or just say `Hello!!` - ----- -Supported by [JetBrains](https://jb.gg/OpenSource) - -[![JetBrains](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/jetbrains.svg)](https://jb.gg/OpenSource) - diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/RECORD deleted file mode 100644 index 3416724a..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/RECORD +++ /dev/null @@ -1,169 +0,0 @@ -../../../bin/beanie,sha256=-vUn6tMDkdRWHwaw_QK6VTZ6lv1Yy1HF9nFAp6NinFo,263 -beanie-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -beanie-2.0.0.dist-info/METADATA,sha256=rM0d1P488zYmTTaogZkhY--DpfvVQ0VD4gQvc7rzcDI,8455 -beanie-2.0.0.dist-info/RECORD,, -beanie-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie-2.0.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -beanie-2.0.0.dist-info/entry_points.txt,sha256=ibm9-Xt7ArP7AT8npK5OgySxjIFIvVl8L-4gT9wn9YY,62 -beanie-2.0.0.dist-info/licenses/LICENSE,sha256=o3wU6WwtMthT67xVQc_Mm2S_NimYzEB2uJHRYC_RvoU,11343 -beanie/__init__.py,sha256=MYDDTMzlqs4HDyMn2z5Sxi_8pO3Nh6PUHC5-dGKFnsw,1788 -beanie/__pycache__/__init__.cpython-312.pyc,, -beanie/__pycache__/exceptions.cpython-312.pyc,, -beanie/__pycache__/operators.cpython-312.pyc,, -beanie/exceptions.py,sha256=I_RZOBqAzq7bOYJKPaaNExiSsjm5T-0NJQNVAk33JIU,941 -beanie/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/executors/__pycache__/__init__.cpython-312.pyc,, -beanie/executors/__pycache__/migrate.cpython-312.pyc,, -beanie/executors/migrate.py,sha256=EG2JDcTsU5LNxAcba1EJG9kRRvhoEwGblLWJ3b9gjhM,6703 -beanie/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/migrations/__pycache__/__init__.cpython-312.pyc,, -beanie/migrations/__pycache__/database.cpython-312.pyc,, -beanie/migrations/__pycache__/models.cpython-312.pyc,, -beanie/migrations/__pycache__/runner.cpython-312.pyc,, -beanie/migrations/__pycache__/template.cpython-312.pyc,, -beanie/migrations/__pycache__/utils.cpython-312.pyc,, -beanie/migrations/controllers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/migrations/controllers/__pycache__/__init__.cpython-312.pyc,, -beanie/migrations/controllers/__pycache__/base.cpython-312.pyc,, -beanie/migrations/controllers/__pycache__/free_fall.cpython-312.pyc,, -beanie/migrations/controllers/__pycache__/iterative.cpython-312.pyc,, -beanie/migrations/controllers/base.py,sha256=019yXtLTYWoIyiboaQbMx3i3uNiEMqvXYuvDPcuWOZ8,376 -beanie/migrations/controllers/free_fall.py,sha256=4k5Yp-HxDf9DV2uIwljNJ2UZspAMSM3sA_MDNkBlvS0,964 -beanie/migrations/controllers/iterative.py,sha256=OFm53sSedLMP7rzcU7wmKOlv-0nZlIVY_q_vqEVsn0E,4939 
-beanie/migrations/database.py,sha256=Lnj0iglrEBdphOwZfD9pK0coXXwZZZfjZOebkBPPNPY,401 -beanie/migrations/models.py,sha256=tMqMHVNp9AsyePxcKcXI1pyjPmk0BaOyw_LB3BIwndk,658 -beanie/migrations/runner.py,sha256=BhoEu_5gm2R84gr_YJEeppmEyIP0T3ZpMAf879tk0sU,8739 -beanie/migrations/template.py,sha256=0tPXNtSFlG9szHdELqa3fpf7PuoaiStTSfHX7p9rdiQ,41 -beanie/migrations/utils.py,sha256=RldAhYoIwnygKeDAKTiBjVYcSakdEVY32aH4bKiG-_4,178 -beanie/odm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/__pycache__/actions.cpython-312.pyc,, -beanie/odm/__pycache__/bulk.cpython-312.pyc,, -beanie/odm/__pycache__/cache.cpython-312.pyc,, -beanie/odm/__pycache__/documents.cpython-312.pyc,, -beanie/odm/__pycache__/enums.cpython-312.pyc,, -beanie/odm/__pycache__/fields.cpython-312.pyc,, -beanie/odm/__pycache__/models.cpython-312.pyc,, -beanie/odm/__pycache__/registry.cpython-312.pyc,, -beanie/odm/__pycache__/union_doc.cpython-312.pyc,, -beanie/odm/__pycache__/views.cpython-312.pyc,, -beanie/odm/actions.py,sha256=tJf7VQOzb3iKKBsv8net9CMgrd52TOP78FY-ji0P4gI,6998 -beanie/odm/bulk.py,sha256=4w-jzXbzngB9zbVkdHy3Nptet1DRNJnH_tCTfOTv6mg,6405 -beanie/odm/cache.py,sha256=ME4vccY_hjq5OxRiW0M3wEnXe6klGOobgbGPTMUqm6I,1316 -beanie/odm/custom_types/__init__.py,sha256=vA1ThxycXqIBkpSgAxNPBD_RAm7iXA-O-NR9i6J4mPA,238 -beanie/odm/custom_types/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/custom_types/__pycache__/decimal.cpython-312.pyc,, -beanie/odm/custom_types/__pycache__/re.cpython-312.pyc,, -beanie/odm/custom_types/bson/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/custom_types/bson/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/custom_types/bson/__pycache__/binary.cpython-312.pyc,, -beanie/odm/custom_types/bson/binary.py,sha256=ctxbjPnvmrQZ2VbQZB2CA_hg4jf3m2oKzmkfCX_1pFs,548 -beanie/odm/custom_types/decimal.py,sha256=aYpjfWzZCZyNaMMStwevUhlz9SYcORgr9eciLOC81Zk,250 -beanie/odm/custom_types/re.py,sha256=pBw8Qmav30s9LboAi2zvVWzoWvlcOAq615wVjdoStTA,567 -beanie/odm/documents.py,sha256=IRPvSaP58VEAs38Niujmmkd64sxWUAMkaJFheIyhzIU,48507 -beanie/odm/enums.py,sha256=q5ljZBOnmekSgHAY3Fb6Ry_pRVZgYIcMzllbJUQlhmQ,312 -beanie/odm/fields.py,sha256=A63Iy9-cCGxSJ5Psm9hVv98va1mnmddE3l8j3yrAI4g,22229 -beanie/odm/interfaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/interfaces/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/aggregate.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/aggregation_methods.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/clone.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/detector.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/find.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/getters.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/inheritance.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/session.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/setters.cpython-312.pyc,, -beanie/odm/interfaces/__pycache__/update.cpython-312.pyc,, -beanie/odm/interfaces/aggregate.py,sha256=K2wFUb3oI-ePeTvQqC8lB_KBwyBPaN76jp2yMf1hVb8,2384 -beanie/odm/interfaces/aggregation_methods.py,sha256=pHXqzxAnaMZ7TinztcIDax3ZmDbNf7DUKGUW7NobRus,5414 -beanie/odm/interfaces/clone.py,sha256=bBxLCEy9haI53I5jI23TanSGqP2eezv_pmumUHo7B0Y,101 -beanie/odm/interfaces/detector.py,sha256=CiXJwyFRH5CNz1-XXyn4ius_wc6UIbO5aEQv-vOeYFA,243 -beanie/odm/interfaces/find.py,sha256=kIJ5gJUS75umNpCU235Se1_rcQOTIrXJCO-GtHXrmto,17610 
-beanie/odm/interfaces/getters.py,sha256=gEty7zbBEN8h_QVLy14sNDzN8y61zP0WLlxvim5Zan8,674 -beanie/odm/interfaces/inheritance.py,sha256=MghGZufO2-f5xs9x5cAZfyFA7045QX70XDTuMl4zhP4,448 -beanie/odm/interfaces/session.py,sha256=NFhuYpJZ5rzlUbYrp25eozGgIXIl-8k4ZxXjwmbS5gI,479 -beanie/odm/interfaces/setters.py,sha256=fKxxeLEXQ1adRQP71BnE-fAnUR_cTpD_hwaCaEpw08k,689 -beanie/odm/interfaces/update.py,sha256=677Lb_riiA7TLdQL13lC45eMqYmg2YcmJ29eHEjg-dY,3142 -beanie/odm/models.py,sha256=9b0oEMOe9fDf-R7zKf0fJzQyWeSVgIPc-dEBngegu9I,464 -beanie/odm/operators/__init__.py,sha256=uhE9EP1Uh6sEzPjFcnu-hlhlDhROMJOm55kA2eHrymc,816 -beanie/odm/operators/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/operators/find/__init__.py,sha256=k56w7kJxnlRI6kyxNyYhfbhBkwLShNfCqez-tSJcZ8Y,116 -beanie/odm/operators/find/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/array.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/bitwise.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/comparison.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/element.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/evaluation.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/geospatial.cpython-312.pyc,, -beanie/odm/operators/find/__pycache__/logical.cpython-312.pyc,, -beanie/odm/operators/find/array.py,sha256=0MHD9L1S_kHnJ_I32uGf0EdbD7lFrAwH_U_zhvdizKY,2185 -beanie/odm/operators/find/bitwise.py,sha256=R-t61Hkv3pBx0nQULTaUoEL7ds6vl21eI05BxH5nT3M,1297 -beanie/odm/operators/find/comparison.py,sha256=g1wXntgfAcMy0qz9Rp4H-jdkmsPIBinvUkGAb2y_ilc,3533 -beanie/odm/operators/find/element.py,sha256=2QgMM9Azm0jpwUmCXIqqGnGR1nZ6SfmlW2QyDJspNJE,1368 -beanie/odm/operators/find/evaluation.py,sha256=Lmb6dASsET_c_BOhCbCFaNHeHRFBaDmFY-2BiCzsPkM,4399 -beanie/odm/operators/find/geospatial.py,sha256=tSl4wim8mvRqyoQCLS-8WxmKNr645rm1hp6wK9TnMIQ,6987 -beanie/odm/operators/find/logical.py,sha256=GVUGmWmrVKOAE5wTMZ95BNdphnXo-iZN09glKmsOibM,3687 -beanie/odm/operators/update/__init__.py,sha256=ar5CgzgcAJbddZFgHwSrFwOT8Q36_eRq4T-6qPtIy3A,232 -beanie/odm/operators/update/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/operators/update/__pycache__/array.cpython-312.pyc,, -beanie/odm/operators/update/__pycache__/bitwise.cpython-312.pyc,, -beanie/odm/operators/update/__pycache__/general.cpython-312.pyc,, -beanie/odm/operators/update/array.py,sha256=IzKnpYgFSoR6nSb20VRqACvMbISkWNh9wIV4Mmp7i7Y,2469 -beanie/odm/operators/update/bitwise.py,sha256=dP2kApCVwv7fGvLm0quVbT0sFzNRfvcBTRaW5fn0Dqc,477 -beanie/odm/operators/update/general.py,sha256=0_96hRTvx4WrsN4uZsqcY7hvfSuSBP_SjJuD_NqLzwg,3937 -beanie/odm/queries/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/queries/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/queries/__pycache__/aggregation.cpython-312.pyc,, -beanie/odm/queries/__pycache__/cursor.cpython-312.pyc,, -beanie/odm/queries/__pycache__/delete.cpython-312.pyc,, -beanie/odm/queries/__pycache__/find.cpython-312.pyc,, -beanie/odm/queries/__pycache__/update.cpython-312.pyc,, -beanie/odm/queries/aggregation.py,sha256=9vImf6F7oHlszpJGTCX56LxsyL0kffoiKcUM8u9bzeI,3313 -beanie/odm/queries/cursor.py,sha256=E9F_RnEi5gXPlbUbwHcRr7BEttflZKA1ctwlsln-rqM,2110 -beanie/odm/queries/delete.py,sha256=rwVbixI6xFMuAwEc7TYjgdlgKOB6jKyh4Cm-47Q-AAs,2537 -beanie/odm/queries/find.py,sha256=fiGOUVobkBQHYbBh6basj0ew7dwUR8kpguIOCb0Uu0Q,36640 -beanie/odm/queries/update.py,sha256=IXcxCzkOr9e5GrIderw8om_vgGp68npa4YgGDb6vSBQ,12606 
-beanie/odm/registry.py,sha256=34hSbfsndIiy9TSzbF_UGzegB00QH-UnYU9r51iWfLs,870 -beanie/odm/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/settings/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/settings/__pycache__/base.cpython-312.pyc,, -beanie/odm/settings/__pycache__/document.cpython-312.pyc,, -beanie/odm/settings/__pycache__/timeseries.cpython-312.pyc,, -beanie/odm/settings/__pycache__/union_doc.cpython-312.pyc,, -beanie/odm/settings/__pycache__/view.cpython-312.pyc,, -beanie/odm/settings/base.py,sha256=arFRERUf7CZr4dM9Oqc4lGzInCQSdS2LutpMlHudSls,1067 -beanie/odm/settings/document.py,sha256=0qjtUM6Cn0KufPPTaZEjNe47yEK59JxMROGDc_vi6G0,1112 -beanie/odm/settings/timeseries.py,sha256=KwSiyEQ8maEBJRT0oZ7F2FemCZF7xzc8PcAeq8ndY9g,1770 -beanie/odm/settings/union_doc.py,sha256=wGAPwec3IPYMFSvHpgW2nyU6Y7MO-wtP8GBUTGMy_ok,94 -beanie/odm/settings/view.py,sha256=UbQVYUbxpKLEl0zTooAO1_Ikouiu5cK_ZiJW43S_StI,328 -beanie/odm/union_doc.py,sha256=Q0Z9cTUEsHHXf5HtQyYVWcerTetwSRE7_WdspVvO8nw,2986 -beanie/odm/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/utils/__pycache__/__init__.cpython-312.pyc,, -beanie/odm/utils/__pycache__/dump.cpython-312.pyc,, -beanie/odm/utils/__pycache__/encoder.cpython-312.pyc,, -beanie/odm/utils/__pycache__/find.cpython-312.pyc,, -beanie/odm/utils/__pycache__/general.cpython-312.pyc,, -beanie/odm/utils/__pycache__/init.cpython-312.pyc,, -beanie/odm/utils/__pycache__/parsing.cpython-312.pyc,, -beanie/odm/utils/__pycache__/projection.cpython-312.pyc,, -beanie/odm/utils/__pycache__/pydantic.cpython-312.pyc,, -beanie/odm/utils/__pycache__/relations.cpython-312.pyc,, -beanie/odm/utils/__pycache__/self_validation.cpython-312.pyc,, -beanie/odm/utils/__pycache__/state.cpython-312.pyc,, -beanie/odm/utils/__pycache__/typing.cpython-312.pyc,, -beanie/odm/utils/dump.py,sha256=oFBPwGnBGR0ZWvHJ0Lps2LBiDswsi5u9oyMwQwa4rBk,1280 -beanie/odm/utils/encoder.py,sha256=WvB15HOKH8_5mPUagFEyWPUZ2a2mThwc4zhUhwG1Sng,5261 -beanie/odm/utils/find.py,sha256=R7SnxkR06RShvsxLRMleZmwsqR1VWE6BYK5d-d6mrfs,16076 -beanie/odm/utils/general.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -beanie/odm/utils/init.py,sha256=HAoJV1i06F7xHvbwNm9tZoJnqRj45FPo_BeSDFYl6Hs,26987 -beanie/odm/utils/parsing.py,sha256=iTwj8w6yldndZGKXTTzAcIEb4vOLkvoq9vjKm1JW8Xo,5109 -beanie/odm/utils/projection.py,sha256=M_ZIg8a3tkT_DLeRgrvUHPd7G3FmFKD1lRJU1pIdQ_o,1151 -beanie/odm/utils/pydantic.py,sha256=E9SsLWFVFg9Ak4v4hbVrLgLZa3oRChf98u1jt5DHqoc,1592 -beanie/odm/utils/relations.py,sha256=rq6aSQ8AIz3-aM8MTHCfkMy3uQ8gNW7PsGT2Y6kO1Bg,1397 -beanie/odm/utils/self_validation.py,sha256=9MpgU1i01XHpmvmt3vgZdvB66tFFO-ViEXqnYlpcARo,544 -beanie/odm/utils/state.py,sha256=rplH57u-VIlTJ0ffUND0bevqs7hnQWwlUVqiBhiwnHQ,3210 -beanie/odm/utils/typing.py,sha256=m6yZG7fGgTOyYhZ78FOszVIJ-vCjEdBMh12WJhSoblo,2341 -beanie/odm/views.py,sha256=f63f71-R4K9F6Q8-IOPJUyV021UJ58T6-NjnjMYrGMQ,2050 -beanie/operators.py,sha256=3_wKo00oVtEdnjZmthFfCplOKpTafDWgarTihA0BYZ0,1845 -beanie/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/REQUESTED deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/WHEEL deleted file mode 100644 index d8b9936d..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/WHEEL 
+++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/entry_points.txt deleted file mode 100644 index ee7b5b77..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -beanie=beanie.executors.migrate:migrations - diff --git a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/licenses/LICENSE deleted file mode 100644 index 308adaf3..00000000 --- a/venv/lib/python3.12/site-packages/beanie-2.0.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2021 Roman Korolev - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/venv/lib/python3.12/site-packages/beanie/__init__.py b/venv/lib/python3.12/site-packages/beanie/__init__.py deleted file mode 100644 index a8909a44..00000000 --- a/venv/lib/python3.12/site-packages/beanie/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -from beanie.migrations.controllers.free_fall import free_fall_migration -from beanie.migrations.controllers.iterative import iterative_migration -from beanie.odm.actions import ( - After, - Before, - Delete, - Insert, - Replace, - Save, - SaveChanges, - Update, - ValidateOnSave, - after_event, - before_event, -) -from beanie.odm.bulk import BulkWriter -from beanie.odm.custom_types import DecimalAnnotation -from beanie.odm.custom_types.bson.binary import BsonBinary -from beanie.odm.documents import ( - Document, - DocumentWithSoftDelete, - MergeStrategy, -) -from beanie.odm.enums import SortDirection -from beanie.odm.fields import ( - BackLink, - BeanieObjectId, - DeleteRules, - Indexed, - Link, - PydanticObjectId, - WriteRules, -) -from beanie.odm.queries.update import UpdateResponse -from beanie.odm.settings.timeseries import Granularity, TimeSeriesConfig -from beanie.odm.union_doc import UnionDoc -from beanie.odm.utils.init import init_beanie -from beanie.odm.views import View - -__version__ = "2.0.0" -__all__ = [ - # ODM - "Document", - "DocumentWithSoftDelete", - "View", - "UnionDoc", - "init_beanie", - "PydanticObjectId", - "BeanieObjectId", - "Indexed", - "TimeSeriesConfig", - "Granularity", - "SortDirection", - "MergeStrategy", - # Actions - "before_event", - "after_event", - "Insert", - "Replace", - "Save", - "SaveChanges", - "ValidateOnSave", - "Delete", - "Before", - "After", - "Update", - # Bulk Write - "BulkWriter", - # Migrations - "iterative_migration", - "free_fall_migration", - # Relations - "Link", - "BackLink", - "WriteRules", - "DeleteRules", - # Custom Types - "DecimalAnnotation", - "BsonBinary", - # UpdateResponse - "UpdateResponse", -] diff --git a/venv/lib/python3.12/site-packages/beanie/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d9c82fca..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/__pycache__/exceptions.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/beanie/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index 4366c760..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/__pycache__/operators.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/__pycache__/operators.cpython-312.pyc deleted file mode 100644 index 766b7b2a..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/__pycache__/operators.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/exceptions.py b/venv/lib/python3.12/site-packages/beanie/exceptions.py deleted file mode 100644 index 92739944..00000000 --- a/venv/lib/python3.12/site-packages/beanie/exceptions.py +++ /dev/null @@ -1,74 +0,0 @@ -class WrongDocumentUpdateStrategy(Exception): - pass - - -class DocumentNotFound(Exception): - pass - - -class DocumentAlreadyCreated(Exception): - pass - - -class DocumentWasNotSaved(Exception): - pass - - -class CollectionWasNotInitialized(Exception): - pass - - -class MigrationException(Exception): - pass - - -class ReplaceError(Exception): - pass - - -class StateManagementIsTurnedOff(Exception): - pass - - -class StateNotSaved(Exception): - pass - - -class RevisionIdWasChanged(Exception): - pass - - -class NotSupported(Exception): - pass - - -class MongoDBVersionError(Exception): - pass - - -class ViewWasNotInitialized(Exception): - pass - - -class ViewHasNoSettings(Exception): - pass - - -class UnionHasNoRegisteredDocs(Exception): - pass - - -class UnionDocNotInited(Exception): - pass - - -class DocWasNotRegisteredInUnionClass(Exception): - pass - - -class Deprecation(Exception): - pass - - -class ApplyChangesException(Exception): - pass diff --git a/venv/lib/python3.12/site-packages/beanie/executors/__init__.py b/venv/lib/python3.12/site-packages/beanie/executors/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4ea5fb85..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/migrate.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/migrate.cpython-312.pyc deleted file mode 100644 index 1bf919f2..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/executors/__pycache__/migrate.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/executors/migrate.py b/venv/lib/python3.12/site-packages/beanie/executors/migrate.py deleted file mode 100644 index ad8eac66..00000000 --- a/venv/lib/python3.12/site-packages/beanie/executors/migrate.py +++ /dev/null @@ -1,232 +0,0 @@ -import asyncio -import logging -import os -import shutil -import sys -from datetime import datetime -from pathlib import Path -from typing import Any - -import click - -if sys.version_info >= (3, 11): - import tomllib -else: - import tomli as tomllib - -from beanie.migrations import template -from beanie.migrations.database import DBHandler -from beanie.migrations.models import RunningDirections, RunningMode -from beanie.migrations.runner import MigrationNode - -logging.basicConfig(format="%(message)s", level=logging.INFO) - - 
-class MigrationSettings: - def __init__(self, **kwargs: Any): - self.direction = ( - kwargs.get("direction") - or self.get_env_value("direction") - or self.get_from_toml("direction") - or RunningDirections.FORWARD - ) - - self.distance = int( - kwargs.get("distance") - or self.get_env_value("distance") - or self.get_from_toml("distance") - or 0 - ) - self.connection_uri = str( - kwargs.get("connection_uri") - or self.get_env_value("connection_uri") - or self.get_from_toml("connection_uri") - ) - self.database_name = str( - kwargs.get("database_name") - or self.get_env_value("database_name") - or self.get_from_toml("database_name") - ) - self.path = Path( - kwargs.get("path") - or self.get_env_value("path") - or self.get_from_toml("path") - ) - self.allow_index_dropping = bool( - kwargs.get("allow_index_dropping") - or self.get_env_value("allow_index_dropping") - or self.get_from_toml("allow_index_dropping") - or False - ) - self.use_transaction = bool(kwargs.get("use_transaction")) - - @staticmethod - def get_env_value(field_name) -> Any: - if field_name == "connection_uri": - value = ( - os.environ.get("BEANIE_URI") - or os.environ.get("BEANIE_CONNECTION_URI") - or os.environ.get("BEANIE_CONNECTION_STRING") - or os.environ.get("BEANIE_MONGODB_DSN") - or os.environ.get("BEANIE_MONGODB_URI") - or os.environ.get("beanie_uri") - or os.environ.get("beanie_connection_uri") - or os.environ.get("beanie_connection_string") - or os.environ.get("beanie_mongodb_dsn") - or os.environ.get("beanie_mongodb_uri") - ) - elif field_name == "database_name": - value = ( - os.environ.get("BEANIE_DB") - or os.environ.get("BEANIE_DB_NAME") - or os.environ.get("BEANIE_DATABASE_NAME") - or os.environ.get("beanie_db") - or os.environ.get("beanie_db_name") - or os.environ.get("beanie_database_name") - ) - else: - value = os.environ.get( - f"BEANIE_{field_name.upper()}" - ) or os.environ.get(f"beanie_{field_name.lower()}") - return value - - @staticmethod - def get_from_toml(field_name) -> Any: - path = Path("pyproject.toml") - if path.is_file(): - with path.open("rb") as f: - toml_data = tomllib.load(f) - val = ( - toml_data.get("tool", {}) - .get("beanie", {}) - .get("migrations", {}) - ) - else: - val = {} - return val.get(field_name) - - -@click.group() -def migrations(): - pass - - -async def run_migrate(settings: MigrationSettings): - DBHandler.set_db(settings.connection_uri, settings.database_name) - root = await MigrationNode.build(settings.path) - mode = RunningMode( - direction=settings.direction, distance=settings.distance - ) - await root.run( - mode=mode, - allow_index_dropping=settings.allow_index_dropping, - use_transaction=settings.use_transaction, - ) - - # Cleanup - client = DBHandler.get_cli() - if client: - await client.close() - - -@migrations.command() -@click.option( - "--forward", - "direction", - required=False, - flag_value="FORWARD", - help="Roll the migrations forward. This is default", -) -@click.option( - "--backward", - "direction", - required=False, - flag_value="BACKWARD", - help="Roll the migrations backward", -) -@click.option( - "-d", - "--distance", - required=False, - help="How many migrations should be done since the current? " - "0 - all the migrations. 
Default is 0", -) -@click.option( - "-uri", - "--connection-uri", - required=False, - type=str, - help="MongoDB connection URI", -) -@click.option( - "-db", "--database_name", required=False, type=str, help="DataBase name" -) -@click.option( - "-p", - "--path", - required=False, - type=str, - help="Path to the migrations directory", -) -@click.option( - "--allow-index-dropping/--forbid-index-dropping", - required=False, - default=False, - help="if allow-index-dropping is set, Beanie will drop indexes from your collection", -) -@click.option( - "--use-transaction/--no-use-transaction", - required=False, - default=True, - help="Enable or disable the use of transactions during migration. " - "When enabled (--use-transaction), Beanie uses transactions for migration, " - "which necessitates a replica set. When disabled (--no-use-transaction), " - "migrations occur without transactions.", -) -def migrate( - direction, - distance, - connection_uri, - database_name, - path, - allow_index_dropping, - use_transaction, -): - settings_kwargs = {} - if direction: - settings_kwargs["direction"] = direction - if distance: - settings_kwargs["distance"] = distance - if connection_uri: - settings_kwargs["connection_uri"] = connection_uri - if database_name: - settings_kwargs["database_name"] = database_name - if path: - settings_kwargs["path"] = path - if allow_index_dropping: - settings_kwargs["allow_index_dropping"] = allow_index_dropping - settings_kwargs["use_transaction"] = use_transaction - settings = MigrationSettings(**settings_kwargs) - - asyncio.run(run_migrate(settings)) - - -@migrations.command() -@click.option("-n", "--name", required=True, type=str, help="Migration name") -@click.option( - "-p", - "--path", - required=True, - type=str, - help="Path to the migrations directory", -) -def new_migration(name, path): - path = Path(path) - ts_string = datetime.now().strftime("%Y%m%d%H%M%S") - file_name = f"{ts_string}_{name}.py" - - shutil.copy(template.__file__, path / file_name) - - -if __name__ == "__main__": - migrations() diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__init__.py b/venv/lib/python3.12/site-packages/beanie/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 47cef856..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/database.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/database.cpython-312.pyc deleted file mode 100644 index 3f553cd6..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/database.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/models.cpython-312.pyc deleted file mode 100644 index b8c0dd18..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/runner.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/runner.cpython-312.pyc deleted file mode 
100644 index 8489d517..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/runner.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/template.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/template.cpython-312.pyc deleted file mode 100644 index 1a89378f..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/template.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 33e29c80..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__init__.py b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 8a723511..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 73b2101c..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/free_fall.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/free_fall.cpython-312.pyc deleted file mode 100644 index 130603e2..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/free_fall.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/iterative.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/iterative.cpython-312.pyc deleted file mode 100644 index 5e3f3cf6..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/__pycache__/iterative.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/base.py b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/base.py deleted file mode 100644 index c49852f0..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/base.py +++ /dev/null @@ -1,18 +0,0 @@ -from abc import ABC, abstractmethod -from typing import List, Type - -from beanie.odm.documents import Document - - -class BaseMigrationController(ABC): - def __init__(self, function): - self.function = function - - @abstractmethod - async def run(self, session): - pass - - @property - @abstractmethod - def models(self) -> List[Type[Document]]: - pass diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/free_fall.py b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/free_fall.py 
deleted file mode 100644 index 31c40c8a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/free_fall.py +++ /dev/null @@ -1,28 +0,0 @@ -from inspect import signature -from typing import Any, List, Type - -from beanie.migrations.controllers.base import BaseMigrationController -from beanie.odm.documents import Document - - -def free_fall_migration(document_models: List[Type[Document]]): - class FreeFallMigrationController(BaseMigrationController): - def __init__(self, function): - self.function = function - self.function_signature = signature(function) - self.document_models = document_models - - def __call__(self, *args: Any, **kwargs: Any): - pass - - @property - def models(self) -> List[Type[Document]]: - return self.document_models - - async def run(self, session): - function_kwargs = {"session": session} - if "self" in self.function_signature.parameters: - function_kwargs["self"] = None - await self.function(**function_kwargs) - - return FreeFallMigrationController diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/iterative.py b/venv/lib/python3.12/site-packages/beanie/migrations/controllers/iterative.py deleted file mode 100644 index 6368e92a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/controllers/iterative.py +++ /dev/null @@ -1,134 +0,0 @@ -import asyncio -from inspect import isclass, signature -from typing import Any, List, Optional, Type, Union - -from beanie.migrations.controllers.base import BaseMigrationController -from beanie.migrations.utils import update_dict -from beanie.odm.documents import Document -from beanie.odm.utils.pydantic import IS_PYDANTIC_V2, parse_model - - -class DummyOutput: - def __init__(self): - super(DummyOutput, self).__setattr__("_internal_structure_dict", {}) - - def __setattr__(self, key, value): - self._internal_structure_dict[key] = value - - def __getattr__(self, item): - try: - return self._internal_structure_dict[item] - except KeyError: - self._internal_structure_dict[item] = DummyOutput() - return self._internal_structure_dict[item] - - def dict(self, to_parse: Optional[Union[dict, "DummyOutput"]] = None): - if to_parse is None: - to_parse = self - input_dict = ( - to_parse._internal_structure_dict - if isinstance(to_parse, DummyOutput) - else to_parse - ) - result_dict = {} - for key, value in input_dict.items(): - if isinstance(value, (DummyOutput, dict)): - result_dict[key] = self.dict(to_parse=value) - else: - result_dict[key] = value - return result_dict - - -def iterative_migration( - document_models: Optional[List[Type[Document]]] = None, - batch_size: int = 10000, -): - class IterativeMigration(BaseMigrationController): - def __init__(self, function): - self.function = function - self.function_signature = signature(function) - input_signature = self.function_signature.parameters.get( - "input_document" - ) - if input_signature is None: - raise RuntimeError("input_signature must not be None") - self.input_document_model: Type[Document] = ( - input_signature.annotation - ) - output_signature = self.function_signature.parameters.get( - "output_document" - ) - if output_signature is None: - raise RuntimeError("output_signature must not be None") - self.output_document_model: Type[Document] = ( - output_signature.annotation - ) - - if ( - not isclass(self.input_document_model) - or not issubclass(self.input_document_model, Document) - or not isclass(self.output_document_model) - or not issubclass(self.output_document_model, Document) - ): - raise TypeError( - 
"input_document and output_document " - "must have annotation of Document subclass" - ) - - self.batch_size = batch_size - - def __call__(self, *args: Any, **kwargs: Any): - pass - - @property - def models(self) -> List[Type[Document]]: - preset_models = document_models - if preset_models is None: - preset_models = [] - return preset_models + [ - self.input_document_model, - self.output_document_model, - ] - - async def run(self, session): - output_documents = [] - all_migration_ops = [] - async for input_document in self.input_document_model.find_all( - session=session - ): - output = DummyOutput() - function_kwargs = { - "input_document": input_document, - "output_document": output, - } - if "self" in self.function_signature.parameters: - function_kwargs["self"] = None - await self.function(**function_kwargs) - output_dict = ( - input_document.dict() - if not IS_PYDANTIC_V2 - else input_document.model_dump() - ) - update_dict(output_dict, output.dict()) - output_document = parse_model( - self.output_document_model, output_dict - ) - output_documents.append(output_document) - - if len(output_documents) == self.batch_size: - all_migration_ops.append( - self.output_document_model.replace_many( - documents=output_documents, session=session - ) - ) - output_documents = [] - - if output_documents: - all_migration_ops.append( - self.output_document_model.replace_many( - documents=output_documents, session=session - ) - ) - await asyncio.gather(*all_migration_ops) - - return IterativeMigration diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/database.py b/venv/lib/python3.12/site-packages/beanie/migrations/database.py deleted file mode 100644 index 3e524073..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/database.py +++ /dev/null @@ -1,16 +0,0 @@ -from pymongo import AsyncMongoClient - - -class DBHandler: - @classmethod - def set_db(cls, uri, db_name): - cls.client = AsyncMongoClient(uri) - cls.database = cls.client[db_name] - - @classmethod - def get_cli(cls): - return cls.client if hasattr(cls, "client") else None - - @classmethod - def get_db(cls): - return cls.database if hasattr(cls, "database") else None diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/models.py b/venv/lib/python3.12/site-packages/beanie/migrations/models.py deleted file mode 100644 index c6de41a6..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/models.py +++ /dev/null @@ -1,33 +0,0 @@ -from datetime import datetime -from enum import Enum -from typing import List, Optional - -from pydantic import Field -from pydantic.main import BaseModel - -from beanie.odm.documents import Document - - -class MigrationLog(Document): - ts: datetime = Field(default_factory=datetime.now) - name: str - is_current: bool - - class Settings: - name = "migrations_log" - - -class RunningDirections(str, Enum): - FORWARD = "FORWARD" - BACKWARD = "BACKWARD" - - -class RunningMode(BaseModel): - direction: RunningDirections - distance: int = 0 - - -class ParsedMigrations(BaseModel): - path: str - names: List[str] - current: Optional[MigrationLog] = None diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/runner.py b/venv/lib/python3.12/site-packages/beanie/migrations/runner.py deleted file mode 100644 index 5d97e732..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/runner.py +++ /dev/null @@ -1,246 +0,0 @@ -import logging -import types -from importlib.machinery import SourceFileLoader -from pathlib import Path -from typing import List, Optional, Type - 
-from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.asynchronous.database import AsyncDatabase - -from beanie.migrations.controllers.iterative import BaseMigrationController -from beanie.migrations.database import DBHandler -from beanie.migrations.models import ( - MigrationLog, - RunningDirections, - RunningMode, -) -from beanie.odm.documents import Document -from beanie.odm.utils.init import init_beanie - -logger = logging.getLogger(__name__) - - -class MigrationNode: - def __init__( - self, - name: str, - forward_class: Optional[Type[Document]] = None, - backward_class: Optional[Type[Document]] = None, - next_migration: Optional["MigrationNode"] = None, - prev_migration: Optional["MigrationNode"] = None, - ): - """ - Node of the migration linked list - - :param name: name of the migration - :param forward_class: Forward class of the migration - :param backward_class: Backward class of the migration - :param next_migration: link to the next migration - :param prev_migration: link to the previous migration - """ - self.name = name - self.forward_class = forward_class - self.backward_class = backward_class - self.next_migration = next_migration - self.prev_migration = prev_migration - - @staticmethod - async def clean_current_migration(): - await MigrationLog.find( - {"is_current": True}, - ).update({"$set": {"is_current": False}}) - - async def update_current_migration(self): - """ - Set sel as a current migration - - :return: - """ - await self.clean_current_migration() - await MigrationLog(is_current=True, name=self.name).insert() - - async def run( - self, - mode: RunningMode, - allow_index_dropping: bool, - use_transaction: bool, - ): - """ - Migrate - - :param mode: RunningMode - :param allow_index_dropping: if index dropping is allowed - :return: None - """ - if mode.direction == RunningDirections.FORWARD: - migration_node = self.next_migration - if migration_node is None: - return None - if mode.distance == 0: - logger.info("Running migrations forward without limit") - while True: - await migration_node.run_forward( - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - migration_node = migration_node.next_migration - if migration_node is None: - break - else: - logger.info(f"Running {mode.distance} migrations forward") - for i in range(mode.distance): - await migration_node.run_forward( - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - migration_node = migration_node.next_migration - if migration_node is None: - break - elif mode.direction == RunningDirections.BACKWARD: - migration_node = self - if mode.distance == 0: - logger.info("Running migrations backward without limit") - while True: - await migration_node.run_backward( - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - migration_node = migration_node.prev_migration - if migration_node is None: - break - else: - logger.info(f"Running {mode.distance} migrations backward") - for i in range(mode.distance): - await migration_node.run_backward( - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - migration_node = migration_node.prev_migration - if migration_node is None: - break - - async def run_forward( - self, allow_index_dropping: bool, use_transaction: bool - ): - if self.forward_class is not None: - await self.run_migration_class( - self.forward_class, - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - await 
self.update_current_migration() - - async def run_backward( - self, allow_index_dropping: bool, use_transaction: bool - ): - if self.backward_class is not None: - await self.run_migration_class( - self.backward_class, - allow_index_dropping=allow_index_dropping, - use_transaction=use_transaction, - ) - if self.prev_migration is not None: - await self.prev_migration.update_current_migration() - else: - await self.clean_current_migration() - - async def run_migration_class( - self, cls: Type, allow_index_dropping: bool, use_transaction: bool - ): - """ - Run Backward or Forward migration class - - :param cls: - :param allow_index_dropping: if index dropping is allowed - :return: - """ - migrations = [ - getattr(cls, migration) - for migration in dir(cls) - if isinstance(getattr(cls, migration), BaseMigrationController) - ] - - client = DBHandler.get_cli() - db = DBHandler.get_db() - if client is None: - raise RuntimeError("client must not be None") - async with client.start_session() as s: - if use_transaction: - async with await s.start_transaction(): - await self.run_migrations( - migrations, db, allow_index_dropping, s - ) - else: - await self.run_migrations( - migrations, db, allow_index_dropping, s - ) - - async def run_migrations( - self, - migrations: List[BaseMigrationController], - db: AsyncDatabase, - allow_index_dropping: bool, - session: AsyncClientSession, - ) -> None: - for migration in migrations: - for model in migration.models: - await init_beanie( - database=db, - document_models=[model], # type: ignore - allow_index_dropping=allow_index_dropping, - ) # TODO this is slow - logger.info( - f"Running migration {migration.function.__name__} " - f"from module {self.name}" - ) - await migration.run(session=session) - - @classmethod - async def build(cls, path: Path): - """ - Build the migrations linked list - - :param path: Relative path to the migrations directory - :return: - """ - logger.info("Building migration list") - names = [] - # Skip migrations that start with an underscore - for modulepath in path.glob("[!_]*.py"): - names.append(modulepath.name) - names.sort() - - db = DBHandler.get_db() - await init_beanie( - database=db, - document_models=[MigrationLog], # type: ignore - ) - current_migration = await MigrationLog.find_one({"is_current": True}) - - root_migration_node = cls("root") - prev_migration_node = root_migration_node - - for name in names: - loader = SourceFileLoader( - (path / name).stem, str((path / name).absolute()) - ) - module = types.ModuleType(loader.name) - loader.exec_module(module) - forward_class = getattr(module, "Forward", None) - backward_class = getattr(module, "Backward", None) - migration_node = cls( - name=name, - prev_migration=prev_migration_node, - forward_class=forward_class, - backward_class=backward_class, - ) - prev_migration_node.next_migration = migration_node - prev_migration_node = migration_node - - if ( - current_migration is not None - and current_migration.name == name - ): - root_migration_node = migration_node - - return root_migration_node diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/template.py b/venv/lib/python3.12/site-packages/beanie/migrations/template.py deleted file mode 100644 index 7b9261b6..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/template.py +++ /dev/null @@ -1,4 +0,0 @@ -class Forward: ... - - -class Backward: ... 
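Note on the removals above and below: these hunks delete the vendored beanie 2.0.0 sources that were committed under venv/; third-party packages should be installed as dependencies (for example, pinned as beanie==2.0.0 in a requirements file) rather than tracked in git. As a minimal sketch of the replacement workflow, assuming a local MongoDB instance and an illustrative Item model (neither is taken from this repository), application startup would look like:

    import asyncio

    from pymongo import AsyncMongoClient
    from beanie import Document, init_beanie


    class Item(Document):
        # Illustrative placeholder model, not part of this repo.
        name: str


    async def main() -> None:
        client = AsyncMongoClient("mongodb://localhost:27017")
        # init_beanie binds document models to their collections; the same
        # call appears in the migration runner deleted in the hunks above.
        await init_beanie(database=client["app"], document_models=[Item])


    asyncio.run(main())

This keeps the repository history free of third-party sources while pip/requirements pinning preserves reproducible installs.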
diff --git a/venv/lib/python3.12/site-packages/beanie/migrations/utils.py b/venv/lib/python3.12/site-packages/beanie/migrations/utils.py deleted file mode 100644 index cbc8b976..00000000 --- a/venv/lib/python3.12/site-packages/beanie/migrations/utils.py +++ /dev/null @@ -1,7 +0,0 @@ -def update_dict(d, u): - for k, v in u.items(): - if isinstance(v, dict): - d[k] = update_dict(d.get(k, {}), v) - else: - d[k] = v - return d diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 190c600a..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/actions.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/actions.cpython-312.pyc deleted file mode 100644 index 7959b915..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/actions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/bulk.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/bulk.cpython-312.pyc deleted file mode 100644 index 80b198f8..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/bulk.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/cache.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/cache.cpython-312.pyc deleted file mode 100644 index 8b3f3487..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/cache.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/documents.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/documents.cpython-312.pyc deleted file mode 100644 index 18e8508c..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/documents.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/enums.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/enums.cpython-312.pyc deleted file mode 100644 index 0aa2718a..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/enums.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/fields.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/fields.cpython-312.pyc deleted file mode 100644 index b56fbef1..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/fields.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/models.cpython-312.pyc deleted file mode 100644 index c9017d54..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/registry.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/registry.cpython-312.pyc deleted file mode 100644 index 
55740dab..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/registry.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/union_doc.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/union_doc.cpython-312.pyc deleted file mode 100644 index db5db976..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/union_doc.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/views.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/views.cpython-312.pyc deleted file mode 100644 index be29cad2..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/__pycache__/views.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/actions.py b/venv/lib/python3.12/site-packages/beanie/odm/actions.py deleted file mode 100644 index 3feca8e4..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/actions.py +++ /dev/null @@ -1,255 +0,0 @@ -import asyncio -import inspect -from enum import Enum -from functools import wraps -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) - -from typing_extensions import ParamSpec - -if TYPE_CHECKING: - from beanie.odm.documents import AsyncDocMethod, DocType, Document - -P = ParamSpec("P") -R = TypeVar("R") - - -class EventTypes(str, Enum): - INSERT = "INSERT" - REPLACE = "REPLACE" - SAVE = "SAVE" - SAVE_CHANGES = "SAVE_CHANGES" - VALIDATE_ON_SAVE = "VALIDATE_ON_SAVE" - DELETE = "DELETE" - UPDATE = "UPDATE" - - -Insert = EventTypes.INSERT -Replace = EventTypes.REPLACE -Save = EventTypes.SAVE -SaveChanges = EventTypes.SAVE_CHANGES -ValidateOnSave = EventTypes.VALIDATE_ON_SAVE -Delete = EventTypes.DELETE -Update = EventTypes.UPDATE - - -class ActionDirections(str, Enum): # TODO think about this name - BEFORE = "BEFORE" - AFTER = "AFTER" - - -Before = ActionDirections.BEFORE -After = ActionDirections.AFTER - - -class ActionRegistry: - _actions: Dict[ - Type["Document"], - Dict[EventTypes, Dict[ActionDirections, List[Callable[..., Any]]]], - ] = {} - - @classmethod - def clean_actions(cls, document_class: Type["Document"]): - if cls._actions.get(document_class) is not None: - del cls._actions[document_class] - - @classmethod - def add_action( - cls, - document_class: Type["Document"], - event_types: List[EventTypes], - action_direction: ActionDirections, - funct: Callable, - ): - """ - Add action to the action registry - :param document_class: document class - :param event_types: List[EventTypes] - :param action_direction: ActionDirections - before or after - :param funct: Callable - function - """ - if cls._actions.get(document_class) is None: - cls._actions[document_class] = { - action_type: { - action_direction: [] - for action_direction in ActionDirections - } - for action_type in EventTypes - } - for event_type in event_types: - cls._actions[document_class][event_type][action_direction].append( - funct - ) - - @classmethod - def get_action_list( - cls, - document_class: Type["Document"], - event_type: EventTypes, - action_direction: ActionDirections, - ) -> List[Callable]: - """ - Get stored action list - :param document_class: Type - document class - :param event_type: EventTypes - type of needed event - :param action_direction: ActionDirections - before or after - :return: List[Callable] - list of stored methods - """ - if document_class 
not in cls._actions: - return [] - return cls._actions[document_class][event_type][action_direction] - - @classmethod - async def run_actions( - cls, - instance: "Document", - event_type: EventTypes, - action_direction: ActionDirections, - exclude: List[Union[ActionDirections, str]], - ): - """ - Run actions - :param instance: Document - object of the Document subclass - :param event_type: EventTypes - event types - :param action_direction: ActionDirections - before or after - """ - if action_direction in exclude: - return - - document_class = instance.__class__ - actions_list = cls.get_action_list( - document_class, event_type, action_direction - ) - coros = [] - for action in actions_list: - if action.__name__ in exclude: - continue - - if inspect.iscoroutinefunction(action): - coros.append(action(instance)) - elif inspect.isfunction(action): - action(instance) - await asyncio.gather(*coros) - - -# `Any` because there is arbitrary attribute assignment on this type -F = TypeVar("F", bound=Any) - - -def register_action( - event_types: Tuple[Union[List[EventTypes], EventTypes], ...], - action_direction: ActionDirections, -) -> Callable[[F], F]: - """ - Decorator. Base registration method. - Used inside `before_event` and `after_event` - :param event_types: Union[List[EventTypes], EventTypes] - event types - :param action_direction: ActionDirections - before or after - :return: - """ - final_event_types = [] - for event_type in event_types: - if isinstance(event_type, list): - final_event_types.extend(event_type) - else: - final_event_types.append(event_type) - - def decorator(f: F) -> F: - f.has_action = True - f.event_types = final_event_types - f.action_direction = action_direction - return f - - return decorator - - -def before_event( - *args: Union[List[EventTypes], EventTypes], -) -> Callable[[F], F]: - """ - Decorator. It adds action, which should run before mentioned one - or many events happen - - :param args: Union[List[EventTypes], EventTypes] - event types - :return: None - """ - return register_action( - action_direction=ActionDirections.BEFORE, event_types=args - ) - - -def after_event( - *args: Union[List[EventTypes], EventTypes], -) -> Callable[[F], F]: - """ - Decorator. 
It adds action, which should run after mentioned one - or many events happen - - :param args: Union[List[EventTypes], EventTypes] - event types - :return: None - """ - - return register_action( - action_direction=ActionDirections.AFTER, event_types=args - ) - - -def wrap_with_actions( - event_type: EventTypes, -) -> Callable[["AsyncDocMethod[DocType, P, R]"], Any]: - """ - Helper function to wrap Document methods with - before and after event listeners - :param event_type: EventTypes - event types - :return: None - """ - - def decorator( - f: "AsyncDocMethod[DocType, P, R]", - ) -> "AsyncDocMethod[DocType, P, R]": - @wraps(f) - async def wrapper( # type: ignore - self: "DocType", - *args: P.args, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - **kwargs: P.kwargs, - ) -> R: - if skip_actions is None: - skip_actions = [] - - await ActionRegistry.run_actions( - self, - event_type=event_type, - action_direction=ActionDirections.BEFORE, - exclude=skip_actions, - ) - - result = await f( - self, - *args, - skip_actions=skip_actions, # type: ignore[arg-type] - **kwargs, - ) - - await ActionRegistry.run_actions( - self, - event_type=event_type, - action_direction=ActionDirections.AFTER, - exclude=skip_actions, - ) - - return result - - return wrapper - - return decorator diff --git a/venv/lib/python3.12/site-packages/beanie/odm/bulk.py b/venv/lib/python3.12/site-packages/beanie/odm/bulk.py deleted file mode 100644 index f0e638f1..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/bulk.py +++ /dev/null @@ -1,157 +0,0 @@ -from __future__ import annotations - -from types import TracebackType -from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Type, Union - -from pymongo import ( - DeleteMany, - DeleteOne, - InsertOne, - ReplaceOne, - UpdateMany, - UpdateOne, -) -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.results import BulkWriteResult - -if TYPE_CHECKING: - from beanie import Document - from beanie.odm.union_doc import UnionDoc - -_WriteOp = Union[ - InsertOne[Mapping[Any, Any]], - DeleteOne, - DeleteMany, - ReplaceOne[Mapping[Any, Any]], - UpdateOne, - UpdateMany, -] - - -class BulkWriter: - """ - A utility class for managing and executing bulk operations. - - This class facilitates the efficient execution of multiple database operations - (e.g., inserts, updates, deletes, replacements) in a single batch. It supports asynchronous - context management and ensures that all queued operations are committed upon exiting the context. - - Attributes: - session Optional[AsyncClientSession]: - The pymongo session used for transactional operations. - Defaults to None, meaning no session is used. - ordered Optional[bool]: - Specifies whether operations are executed sequentially (default) or in parallel. - - If True, operations are performed serially, stopping at the first failure. - - If False, operations may be executed in arbitrary order, and all operations are attempted - regardless of individual failures. - bypass_document_validation Optional[bool]: - If True, document-level validation is bypassed for all operations - in the bulk write. This applies to MongoDB's schema validation rules, allowing documents that - do not meet validation criteria to be inserted or modified. Defaults to False. - comment Optional[Any]: - A user-provided comment attached to the bulk operation command, useful for - auditing and debugging purposes. 
- operations List[Union[DeleteMany, DeleteOne, InsertOne, ReplaceOne, UpdateMany, UpdateOne]]: - A list of MongoDB operations queued for bulk execution. - object_class Type[Union[Document, UnionDoc]]: - The document model class associated with the operations. - - Parameters: - session Optional[AsyncClientSession]: The pymongo session for transaction support. - Defaults to None (no session). - ordered Optional[bool]: Specifies whether operations are executed in sequence (True) or in parallel (False). - Defaults to True. - bypass_document_validation Optional[bool]: Allows the bulk operation to bypass document-level validation. - This is particularly useful when working with schemas that are being phased in or for bulk imports - where strict validation may not be necessary. Defaults to False. - comment Optional[Any]: A custom comment attached to the bulk operation. - Defaults to None. - object_class Type[Union[Document, UnionDoc]]: The document model class associated with the operations. - """ - - def __init__( - self, - session: Optional[AsyncClientSession] = None, - ordered: bool = True, - object_class: Optional[Type[Union[Document, UnionDoc]]] = None, - bypass_document_validation: Optional[bool] = False, - comment: Optional[Any] = None, - ) -> None: - self.operations: List[_WriteOp] = [] - self.session = session - self.ordered = ordered - self.object_class = object_class - self.bypass_document_validation = bypass_document_validation - self.comment = comment - self._collection_name: Optional[str] = ( - object_class.get_collection_name() if object_class else None - ) - - async def __aenter__(self) -> "BulkWriter": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - if exc_type is None: - await self.commit() - - async def commit(self) -> Optional[BulkWriteResult]: - """ - Commit all queued operations to the database. - - Executes all queued operations in a single bulk write request. If there - are no operations to commit, it returns ``None``. - - :return: The result of the bulk write operation if operations are committed. - Returns ``None`` if there are no operations to execute. - :rtype: Optional[BulkWriteResult] - - :raises ValueError: - If the object_class is not specified before committing. - """ - if not self.operations: - return None - if not self.object_class: - raise ValueError( - "The document model class must be specified before committing operations." - ) - return await self.object_class.get_pymongo_collection().bulk_write( - self.operations, - ordered=self.ordered, - bypass_document_validation=self.bypass_document_validation, - session=self.session, - comment=self.comment, - ) - - def add_operation( - self, - object_class: Type[Union[Document, UnionDoc]], - operation: _WriteOp, - ): - """ - Add an operation to the queue. - - This method adds a MongoDB operation to the BulkWriter's operation queue. - All operations in the queue must belong to the same collection. - - :param object_class: Type[Union[Document, UnionDoc]] - The document model class associated with the operation. - :param operation: Union[DeleteMany, DeleteOne, InsertOne, ReplaceOne, UpdateMany, UpdateOne] - The MongoDB operation to add to the queue. - :raises ValueError: - If the collection differs from the one already associated with the BulkWriter. 
-        """
-        if self.object_class is None:
-            self.object_class = object_class
-            self._collection_name = object_class.get_collection_name()
-        else:
-            if object_class.get_collection_name() != self._collection_name:
-                raise ValueError(
-                    "All operations must target the same collection"
-                )
-        self.operations.append(operation)
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/cache.py b/venv/lib/python3.12/site-packages/beanie/odm/cache.py
deleted file mode 100644
index f5297d48..00000000
--- a/venv/lib/python3.12/site-packages/beanie/odm/cache.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import collections
-import datetime
-from datetime import timedelta, timezone
-from typing import Any, Optional
-
-from pydantic import BaseModel, Field
-
-
-class CachedItem(BaseModel):
-    timestamp: datetime.datetime = Field(
-        default_factory=lambda: datetime.datetime.now(tz=timezone.utc)
-    )
-    value: Any
-
-
-class LRUCache:
-    def __init__(self, capacity: int, expiration_time: timedelta):
-        self.capacity: int = capacity
-        self.expiration_time: timedelta = expiration_time
-        self.cache: collections.OrderedDict = collections.OrderedDict()
-
-    def get(self, key) -> Optional[Any]:
-        try:
-            item: CachedItem = self.cache.pop(key)
-            if (
-                datetime.datetime.now(tz=timezone.utc) - item.timestamp
-                > self.expiration_time
-            ):
-                return None
-            self.cache[key] = item
-            return item.value
-        except KeyError:
-            return None
-
-    def set(self, key, value) -> None:
-        try:
-            self.cache.pop(key)
-        except KeyError:
-            if len(self.cache) >= self.capacity:
-                self.cache.popitem(last=False)
-        self.cache[key] = CachedItem(value=value)
-
-    @staticmethod
-    def create_key(*args):
-        return str(args)  # TODO think about this
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__init__.py
deleted file mode 100644
index f7a911d9..00000000
--- a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from beanie.odm.utils.pydantic import IS_PYDANTIC_V2
-
-if IS_PYDANTIC_V2:
-    from beanie.odm.custom_types.decimal import DecimalAnnotation
-else:
-    from decimal import Decimal as DecimalAnnotation
-
-__all__ = [
-    "DecimalAnnotation",
-]
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
index bec6d88b..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/decimal.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/decimal.cpython-312.pyc
deleted file mode 100644
index b35eb957..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/decimal.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/re.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/re.cpython-312.pyc
deleted file mode 100644
index 99348b39..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/__pycache__/re.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__init__.py
b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 730537e7..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/binary.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/binary.cpython-312.pyc deleted file mode 100644 index 787e4c63..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/__pycache__/binary.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/binary.py b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/binary.py deleted file mode 100644 index a3aace5b..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/bson/binary.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -import bson -import pydantic -from typing_extensions import Annotated - -from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 - - -def _to_bson_binary(value: Any) -> bson.Binary: - return value if isinstance(value, bson.Binary) else bson.Binary(value) - - -if IS_PYDANTIC_V2: - BsonBinary = Annotated[ - bson.Binary, pydantic.PlainValidator(_to_bson_binary) - ] -else: - - class BsonBinary(bson.Binary): # type: ignore[no-redef] - @classmethod - def __get_validators__(cls): - yield _to_bson_binary diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/decimal.py b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/decimal.py deleted file mode 100644 index 89d833f7..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/decimal.py +++ /dev/null @@ -1,12 +0,0 @@ -import decimal - -import bson -import pydantic -from typing_extensions import Annotated - -DecimalAnnotation = Annotated[ - decimal.Decimal, - pydantic.BeforeValidator( - lambda v: v.to_decimal() if isinstance(v, bson.Decimal128) else v - ), -] diff --git a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/re.py b/venv/lib/python3.12/site-packages/beanie/odm/custom_types/re.py deleted file mode 100644 index 0282eb2e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/custom_types/re.py +++ /dev/null @@ -1,26 +0,0 @@ -import re - -import bson -import pydantic -from typing_extensions import Annotated - -from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 - - -def _to_bson_regex(v): - return v.try_compile() if isinstance(v, bson.Regex) else v - - -if IS_PYDANTIC_V2: - Pattern = Annotated[ - re.Pattern, - pydantic.BeforeValidator( - lambda v: v.try_compile() if isinstance(v, bson.Regex) else v - ), - ] -else: - - class Pattern(bson.Regex): # type: ignore[no-redef] - @classmethod - def __get_validators__(cls): - yield _to_bson_regex diff --git a/venv/lib/python3.12/site-packages/beanie/odm/documents.py b/venv/lib/python3.12/site-packages/beanie/odm/documents.py deleted file mode 100644 index 72b2557b..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/documents.py +++ /dev/null @@ -1,1385 +0,0 @@ -import asyncio -import warnings -from datetime import datetime, timezone -from enum import Enum -from typing import ( - 
TYPE_CHECKING, - Any, - Callable, - ClassVar, - Coroutine, - Dict, - Iterable, - List, - Mapping, - Optional, - Tuple, - Type, - TypeVar, - Union, -) -from uuid import UUID, uuid4 - -from bson import DBRef, ObjectId -from lazy_model import LazyModel -from pydantic import ( - ConfigDict, - Field, - PrivateAttr, - ValidationError, -) -from pydantic.class_validators import root_validator -from pydantic.main import BaseModel -from pymongo import InsertOne -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.errors import DuplicateKeyError -from pymongo.results import ( - DeleteResult, - InsertManyResult, -) -from typing_extensions import Concatenate, ParamSpec, Self, TypeAlias - -from beanie.exceptions import ( - CollectionWasNotInitialized, - DocumentNotFound, - DocumentWasNotSaved, - NotSupported, - ReplaceError, - RevisionIdWasChanged, -) -from beanie.odm.actions import ( - ActionDirections, - EventTypes, - wrap_with_actions, -) -from beanie.odm.bulk import BulkWriter -from beanie.odm.cache import LRUCache -from beanie.odm.enums import SortDirection -from beanie.odm.fields import ( - BackLink, - DeleteRules, - ExpressionField, - Link, - LinkInfo, - LinkTypes, - PydanticObjectId, - WriteRules, -) -from beanie.odm.interfaces.aggregate import AggregateInterface -from beanie.odm.interfaces.detector import ModelType -from beanie.odm.interfaces.find import FindInterface -from beanie.odm.interfaces.getters import OtherGettersInterface -from beanie.odm.interfaces.inheritance import InheritanceInterface -from beanie.odm.interfaces.setters import SettersInterface -from beanie.odm.models import ( - InspectionError, - InspectionResult, - InspectionStatuses, -) -from beanie.odm.operators.find.comparison import In -from beanie.odm.operators.update.general import ( - CurrentDate, - Inc, - SetRevisionId, - Unset, -) -from beanie.odm.operators.update.general import ( - Set as SetOperator, -) -from beanie.odm.queries.find import FindMany, FindOne -from beanie.odm.queries.update import UpdateMany, UpdateResponse -from beanie.odm.settings.document import DocumentSettings -from beanie.odm.utils.dump import get_dict, get_top_level_nones -from beanie.odm.utils.parsing import apply_changes, merge_models -from beanie.odm.utils.pydantic import ( - IS_PYDANTIC_V2, - get_extra_field_info, - get_field_type, - get_model_dump, - get_model_fields, - parse_model, - parse_object_as, -) -from beanie.odm.utils.self_validation import validate_self_before -from beanie.odm.utils.state import ( - previous_saved_state_needed, - save_state_after, - saved_state_needed, -) -from beanie.odm.utils.typing import extract_id_class - -if IS_PYDANTIC_V2: - from pydantic import model_validator - -if TYPE_CHECKING: - from beanie.odm.views import View - -FindType = TypeVar("FindType", bound=Union["Document", "View"]) -DocType = TypeVar("DocType", bound="Document") -P = ParamSpec("P") -R = TypeVar("R") -# can describe both sync and async, where R itself is a coroutine -AnyDocMethod: TypeAlias = Callable[Concatenate[DocType, P], R] -# describes only async -AsyncDocMethod: TypeAlias = Callable[ - Concatenate[DocType, P], Coroutine[Any, Any, R] -] -DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) - - -def json_schema_extra(schema: Dict[str, Any], model: Type["Document"]) -> None: - # remove excluded fields from the json schema - properties = schema.get("properties") - if not properties: - return - for k, field in get_model_fields(model).items(): - k = field.alias or k - if k not in 
properties: - continue - field_info = field if IS_PYDANTIC_V2 else field.field_info - if field_info.exclude: - del properties[k] - - -def document_alias_generator(s: str) -> str: - if s == "id": - return "_id" - return s - - -class MergeStrategy(str, Enum): - local = "local" - remote = "remote" - - -class Document( - LazyModel, - SettersInterface, - InheritanceInterface, - FindInterface, - AggregateInterface, - OtherGettersInterface, -): - """ - Document Mapping class. - - Fields: - - - `id` - MongoDB document ObjectID "_id" field. - Mapped to the PydanticObjectId class - """ - - if IS_PYDANTIC_V2: - model_config = ConfigDict( - json_schema_extra=json_schema_extra, - populate_by_name=True, - alias_generator=document_alias_generator, - ) - else: - - class Config: - json_encoders = {ObjectId: str} - allow_population_by_field_name = True - fields = {"id": "_id"} - schema_extra = staticmethod(json_schema_extra) - - id: Optional[PydanticObjectId] = Field( - default=None, description="MongoDB document ObjectID" - ) - - # State - revision_id: Optional[UUID] = Field(default=None, exclude=True) - _saved_state: Optional[Dict[str, Any]] = PrivateAttr(default=None) - _previous_saved_state: Optional[Dict[str, Any]] = PrivateAttr(default=None) - - # Relations - _link_fields: ClassVar[Optional[Dict[str, LinkInfo]]] = None - - # Cache - _cache: ClassVar[Optional[LRUCache]] = None - - # Settings - _document_settings: ClassVar[Optional[DocumentSettings]] = None - - # Database - _database_major_version: ClassVar[int] = 4 - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super(Document, self).__init__(*args, **kwargs) - self.get_pymongo_collection() - - @classmethod - def _fill_back_refs(cls, values): - if cls._link_fields: - for field_name, link_info in cls._link_fields.items(): - if ( - link_info.link_type - in [LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT] - and field_name not in values - ): - values[field_name] = BackLink[link_info.document_class]( - link_info.document_class - ) - if ( - link_info.link_type - in [LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST] - and field_name not in values - ): - values[field_name] = [ - BackLink[link_info.document_class]( - link_info.document_class - ) - ] - return values - - if IS_PYDANTIC_V2: - - @model_validator(mode="before") - def fill_back_refs(cls, values): - return cls._fill_back_refs(values) - - else: - - @root_validator(pre=True) - def fill_back_refs(cls, values): - return cls._fill_back_refs(values) - - @classmethod - async def get( - cls: Type["DocType"], - document_id: Any, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Optional["DocType"]: - """ - Get document by id, returns None if document does not exist - - :param document_id: PydanticObjectId - document id - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - ignore cache (if it is turned on) - :param **pymongo_kwargs: pymongo native parameters for find operation - :return: Union["Document", None] - """ - if not isinstance( - document_id, - extract_id_class(get_field_type(get_model_fields(cls)["id"])), - ): - document_id = parse_object_as( - get_field_type(get_model_fields(cls)["id"]), document_id - ) - - return await cls.find_one( - {"_id": document_id}, - session=session, - ignore_cache=ignore_cache, - 
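A brief editorial sketch of the `get` contract documented above; the `Product` model and id are placeholders:

```python
# Sketch: get() returns the parsed document, or None when nothing matches.
from beanie import PydanticObjectId


async def load_product(product_id: PydanticObjectId) -> None:
    product = await Product.get(product_id)
    if product is None:
        raise LookupError("no such product")
    # get(..., fetch_links=True) would also resolve Link fields in one call
```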
fetch_links=fetch_links, - with_children=with_children, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - async def sync(self, merge_strategy: MergeStrategy = MergeStrategy.remote): - """ - Sync the document with the database - - :param merge_strategy: MergeStrategy - how to merge the document - :return: None - """ - if ( - merge_strategy == MergeStrategy.local - and self.get_settings().use_state_management is False - ): - raise ValueError( - "State management must be turned on to use local merge strategy" - ) - if self.id is None: - raise DocumentWasNotSaved - document = await self.find_one({"_id": self.id}) - if document is None: - raise DocumentNotFound - - if merge_strategy == MergeStrategy.local: - original_changes = self.get_changes() - new_state = document.get_saved_state() - if new_state is None: - raise DocumentWasNotSaved - changes_to_apply = self._collect_updates( - new_state, original_changes - ) - merge_models(self, document) - apply_changes(changes_to_apply, self) - elif merge_strategy == MergeStrategy.remote: - merge_models(self, document) - else: - raise ValueError("Invalid merge strategy") - - @wrap_with_actions(EventTypes.INSERT) - @save_state_after - @validate_self_before - async def insert( - self: Self, - *, - link_rule: WriteRules = WriteRules.DO_NOTHING, - session: Optional[AsyncClientSession] = None, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - ) -> Self: - """ - Insert the document (self) to the collection - :param link_rule: WriteRules - if "WriteRules.WRITE", it will insert Link Documents to db. - :param session: AsyncClientSession - pymongo session - :return: self - """ - if self.get_settings().use_revision: - self.revision_id = uuid4() - if link_rule == WriteRules.WRITE: - link_fields = self.get_link_fields() - if link_fields is not None: - for field_info in link_fields.values(): - value = getattr(self, field_info.field_name) - if field_info.link_type in [ - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_DIRECT, - ]: - if isinstance(value, Document): - await value.save( - link_rule=WriteRules.WRITE, session=session - ) - if field_info.link_type in [ - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, - ]: - if isinstance(value, List): - await asyncio.gather( - *[ - obj.save( - link_rule=WriteRules.WRITE, - session=session, - ) - for obj in value - if isinstance(obj, Document) - ] - ) - result = await self.get_pymongo_collection().insert_one( - get_dict( - self, to_db=True, keep_nulls=self.get_settings().keep_nulls - ), - session=session, - ) - new_id = result.inserted_id - if not isinstance( - new_id, - extract_id_class(get_field_type(get_model_fields(self)["id"])), - ): - new_id = parse_object_as( - get_field_type(get_model_fields(self)["id"]), new_id - ) - self.id = new_id - return self - - async def create( - self: Self, - session: Optional[AsyncClientSession] = None, - ) -> Self: - """ - The same as self.insert() - :return: self - """ - return await self.insert(session=session) - - @classmethod - async def insert_one( - cls: Type[DocType], - document: DocType, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional["BulkWriter"] = None, - link_rule: WriteRules = WriteRules.DO_NOTHING, - ) -> Optional[DocType]: - """ - Insert one document to the collection - :param document: Document - document to insert - :param session: AsyncClientSession - pymongo session - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param link_rule: InsertRules - hot to manage link 
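The link-cascading behaviour implemented in `insert` above can be sketched as follows; `House` and `Door` are hypothetical linked models, not part of the deleted source:

```python
# Sketch: with WriteRules.WRITE, linked documents are saved before the
# owner is inserted; the default DO_NOTHING leaves Link fields untouched.
from beanie import Document, Link, WriteRules


class Door(Document):
    height: int


class House(Document):
    door: Link[Door]


async def build() -> None:
    house = House(door=Door(height=2))
    await house.insert(link_rule=WriteRules.WRITE)
```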
fields - :return: DocType - """ - if not isinstance(document, cls): - raise TypeError( - "Inserting document must be of the original document class" - ) - if bulk_writer is None: - return await document.insert(link_rule=link_rule, session=session) - else: - if link_rule == WriteRules.WRITE: - raise NotSupported( - "Cascade insert with bulk writing not supported" - ) - bulk_writer.add_operation( - type(document), - InsertOne( - get_dict( - document, - to_db=True, - keep_nulls=document.get_settings().keep_nulls, - ) - ), - ) - return None - - @classmethod - async def insert_many( - cls: Type[DocType], - documents: Iterable[DocType], - session: Optional[AsyncClientSession] = None, - link_rule: WriteRules = WriteRules.DO_NOTHING, - **pymongo_kwargs: Any, - ) -> InsertManyResult: - """ - Insert many documents to the collection - - :param documents: List["Document"] - documents to insert - :param session: AsyncClientSession - pymongo session - :param link_rule: InsertRules - how to manage link fields - :return: InsertManyResult - """ - if link_rule == WriteRules.WRITE: - raise NotSupported( - "Cascade insert not supported for insert many method" - ) - documents_list = [ - get_dict( - document, - to_db=True, - keep_nulls=document.get_settings().keep_nulls, - ) - for document in documents - ] - return await cls.get_pymongo_collection().insert_many( - documents_list, session=session, **pymongo_kwargs - ) - - @wrap_with_actions(EventTypes.REPLACE) - @save_state_after - @validate_self_before - async def replace( - self: Self, - ignore_revision: bool = False, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - link_rule: WriteRules = WriteRules.DO_NOTHING, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - ) -> Self: - """ - Fully update the document in the database - - :param session: Optional[AsyncClientSession] - pymongo session. - :param ignore_revision: bool - do force replace. - Used when revision based protection is turned on. 
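For completeness, a sketch of `insert_many`, which performs one batched insert and, per the docstring above, rejects cascading link writes:

```python
# Sketch: insert_many issues a single pymongo insert_many call;
# passing WriteRules.WRITE raises NotSupported, as documented above.
async def seed() -> None:
    result = await Product.insert_many(
        [Product(name="a"), Product(name="b")]
    )
    print(result.inserted_ids)  # ids assigned by MongoDB
```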
- :param bulk_writer: "BulkWriter" - Beanie bulk writer - :return: self - """ - if self.id is None: - raise ValueError("Document must have an id") - - if bulk_writer is not None and link_rule != WriteRules.DO_NOTHING: - raise NotSupported - - if link_rule == WriteRules.WRITE: - link_fields = self.get_link_fields() - if link_fields is not None: - for field_info in link_fields.values(): - value = getattr(self, field_info.field_name) - if field_info.link_type in [ - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_DIRECT, - LinkTypes.BACK_DIRECT, - LinkTypes.OPTIONAL_BACK_DIRECT, - ]: - if isinstance(value, Document): - await value.replace( - link_rule=link_rule, - bulk_writer=bulk_writer, - ignore_revision=ignore_revision, - session=session, - ) - if field_info.link_type in [ - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, - LinkTypes.BACK_LIST, - LinkTypes.OPTIONAL_BACK_LIST, - ]: - if isinstance(value, List): - await asyncio.gather( - *[ - obj.replace( - link_rule=link_rule, - bulk_writer=bulk_writer, - ignore_revision=ignore_revision, - session=session, - ) - for obj in value - if isinstance(obj, Document) - ] - ) - - use_revision_id = self.get_settings().use_revision - find_query: Dict[str, Any] = {"_id": self.id} - - if use_revision_id and not ignore_revision: - find_query["revision_id"] = self.revision_id - self.revision_id = uuid4() - try: - await self.find_one(find_query).replace_one( - self, - session=session, - bulk_writer=bulk_writer, - ) - except DocumentNotFound: - if use_revision_id and not ignore_revision: - raise RevisionIdWasChanged - else: - raise DocumentNotFound - return self - - @wrap_with_actions(EventTypes.SAVE) - @save_state_after - @validate_self_before - async def save( - self: Self, - session: Optional[AsyncClientSession] = None, - link_rule: WriteRules = WriteRules.DO_NOTHING, - ignore_revision: bool = False, - **kwargs: Any, - ) -> Self: - """ - Update an existing model in the database or - insert it if it does not yet exist. - - :param session: Optional[AsyncClientSession] - pymongo session. - :param link_rule: WriteRules - rules how to deal with links on writing - :param ignore_revision: bool - do force save. 
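The revision guard in `replace` above surfaces as `RevisionIdWasChanged`; a hedged sketch of handling it (`Product` remains the hypothetical model):

```python
# Sketch: with use_revision enabled in Settings, a concurrent write
# invalidates our revision_id and replace() raises RevisionIdWasChanged.
from beanie.exceptions import RevisionIdWasChanged


async def replace_with_retry(product: "Product") -> None:
    try:
        await product.replace()
    except RevisionIdWasChanged:
        # force the write, as documented, via ignore_revision
        await product.replace(ignore_revision=True)
```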
- :return: self - """ - if link_rule == WriteRules.WRITE: - link_fields = self.get_link_fields() - if link_fields is not None: - for field_info in link_fields.values(): - value = getattr(self, field_info.field_name) - if field_info.link_type in [ - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_DIRECT, - LinkTypes.BACK_DIRECT, - LinkTypes.OPTIONAL_BACK_DIRECT, - ]: - if isinstance(value, Document): - await value.save( - link_rule=link_rule, session=session - ) - if field_info.link_type in [ - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, - LinkTypes.BACK_LIST, - LinkTypes.OPTIONAL_BACK_LIST, - ]: - if isinstance(value, List): - await asyncio.gather( - *[ - obj.save( - link_rule=link_rule, session=session - ) - for obj in value - if isinstance(obj, Document) - ] - ) - - if self.get_settings().keep_nulls is False: - return await self.update( - SetOperator( - get_dict( - self, - to_db=True, - keep_nulls=self.get_settings().keep_nulls, - ) - ), - Unset(get_top_level_nones(self)), - session=session, - ignore_revision=ignore_revision, - upsert=True, - **kwargs, - ) - else: - return await self.update( - SetOperator( - get_dict( - self, - to_db=True, - keep_nulls=self.get_settings().keep_nulls, - ) - ), - session=session, - ignore_revision=ignore_revision, - upsert=True, - **kwargs, - ) - - @saved_state_needed - @wrap_with_actions(EventTypes.SAVE_CHANGES) - @validate_self_before - async def save_changes( - self: Self, - ignore_revision: bool = False, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - ) -> Optional[Self]: - """ - Save changes. - State management usage must be turned on - - :param ignore_revision: bool - ignore revision id, if revision is turned on - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :return: Optional[self] - """ - if not self.is_changed: - return None - changes = self.get_changes() - if self.get_settings().keep_nulls is False: - return await self.update( - SetOperator(changes), - Unset(get_top_level_nones(self)), - ignore_revision=ignore_revision, - session=session, - bulk_writer=bulk_writer, - ) - else: - return await self.set( - changes, - ignore_revision=ignore_revision, - session=session, - bulk_writer=bulk_writer, - ) - - @classmethod - async def replace_many( - cls: Type[DocType], - documents: List[DocType], - session: Optional[AsyncClientSession] = None, - ) -> None: - """ - Replace list of documents - - :param documents: List["Document"] - :param session: Optional[AsyncClientSession] - pymongo session. - :return: None - """ - ids_list = [document.id for document in documents] - if await cls.find(In(cls.id, ids_list)).count() != len(ids_list): - raise ReplaceError( - "Some of the documents are not exist in the collection" - ) - async with BulkWriter(session=session) as bulk_writer: - for document in documents: - await document.replace( - bulk_writer=bulk_writer, session=session - ) - - @wrap_with_actions(EventTypes.UPDATE) - @save_state_after - async def update( - self: Self, - *args: Union[Dict[Any, Any], Mapping[Any, Any]], - ignore_revision: bool = False, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - skip_sync: Optional[bool] = None, - **pymongo_kwargs: Any, - ) -> Self: - """ - Partially update the document in the database - - :param args: *Union[dict, Mapping] - the modifications to apply. 
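Since `save_changes` (above) writes only the computed diff, state management must be enabled; a minimal editorial sketch:

```python
# Sketch: requires `use_state_management = True` in the model's Settings,
# and a document that was previously fetched or saved.
async def rename(product: "Product") -> None:
    product.name = "renamed"
    await product.save_changes()  # returns None if nothing changed
```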
- :param session: AsyncClientSession - pymongo session. - :param ignore_revision: bool - force update. Will update even if revision id is not the same, as stored - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param pymongo_kwargs: pymongo native parameters for update operation - :return: self - """ - arguments: list[Any] = list(args) - - if skip_sync is not None: - raise DeprecationWarning( - "skip_sync parameter is not supported. The document get synced always using atomic operation." - ) - use_revision_id = self.get_settings().use_revision - - if self.id is not None: - find_query: Dict[str, Any] = {"_id": self.id} - else: - find_query = {"_id": PydanticObjectId()} - - if use_revision_id and not ignore_revision: - find_query["revision_id"] = self.revision_id - - if use_revision_id: - new_revision_id = uuid4() - arguments.append(SetRevisionId(new_revision_id)) - try: - result = await self.find_one(find_query).update( - *arguments, - session=session, - response_type=UpdateResponse.NEW_DOCUMENT, - bulk_writer=bulk_writer, - **pymongo_kwargs, - ) - except DuplicateKeyError: - raise RevisionIdWasChanged - if bulk_writer is None: - if use_revision_id and not ignore_revision and result is None: - raise RevisionIdWasChanged - merge_models(self, result) - return self - - @classmethod - def update_all( - cls, - *args: Union[dict, Mapping], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> UpdateMany: - """ - Partially update all the documents - - :param args: *Union[dict, Mapping] - the modifications to apply. - :param session: AsyncClientSession - pymongo session. - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param **pymongo_kwargs: pymongo native parameters for find operation - :return: UpdateMany query - """ - return cls.find_all().update_many( - *args, session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ) - - def set( - self: Self, - expression: Dict[Union[ExpressionField, str, Any], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - skip_sync: Optional[bool] = None, - **kwargs: Any, - ) -> Coroutine[None, None, Self]: - """ - Set values - - Example: - - ```python - - class Sample(Document): - one: int - - await Document.find(Sample.one == 1).set({Sample.one: 100}) - - ``` - - Uses [Set operator](operators/update.md#set) - - :param expression: Dict[Union[ExpressionField, str, Any], Any] - keys and - values to set - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :param skip_sync: bool - skip doc syncing. Available for the direct instances only - :return: self - """ - return self.update( - SetOperator(expression), - session=session, - bulk_writer=bulk_writer, - skip_sync=skip_sync, - **kwargs, - ) - - def current_date( - self: Self, - expression: Dict[Union[datetime, ExpressionField, str], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - skip_sync: Optional[bool] = None, - **kwargs: Any, - ) -> Coroutine[None, None, Self]: - """ - Set current date - - Uses [CurrentDate operator](operators/update.md#currentdate) - - :param expression: Dict[Union[datetime, ExpressionField, str], Any] - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :param skip_sync: bool - skip doc syncing. 
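The operator-based update path shown above also powers `update_all`; a hedged sketch using the `name` field of the hypothetical `Product` model:

```python
# Sketch: update_all fans out to find_all().update_many() with the
# given update operators.
from beanie.odm.operators.update.general import Set


async def clear_names() -> None:
    await Product.update_all(Set({Product.name: ""}))
```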
Available for the direct instances only - :return: self - """ - return self.update( - CurrentDate(expression), - session=session, - bulk_writer=bulk_writer, - skip_sync=skip_sync, - **kwargs, - ) - - def inc( - self: Self, - expression: Dict[Union[ExpressionField, float, int, str], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - skip_sync: Optional[bool] = None, - **kwargs: Any, - ) -> Coroutine[None, None, Self]: - """ - Increment - - Example: - - ```python - - class Sample(Document): - one: int - - await Document.find(Sample.one == 1).inc({Sample.one: 100}) - - ``` - - Uses [Inc operator](operators/update.md#inc) - - :param expression: Dict[Union[ExpressionField, float, int, str], Any] - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :param skip_sync: bool - skip doc syncing. Available for the direct instances only - :return: self - """ - return self.update( - Inc(expression), - session=session, - bulk_writer=bulk_writer, - skip_sync=skip_sync, - **kwargs, - ) - - @wrap_with_actions(EventTypes.DELETE) - async def delete( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - link_rule: DeleteRules = DeleteRules.DO_NOTHING, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - **pymongo_kwargs: Any, - ) -> Optional[DeleteResult]: - """ - Delete the document - - :param session: Optional[AsyncClientSession] - pymongo session. - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param link_rule: DeleteRules - rules for link fields - :param **pymongo_kwargs: pymongo native parameters for delete operation - :return: Optional[DeleteResult] - pymongo DeleteResult instance. - """ - - if link_rule == DeleteRules.DELETE_LINKS: - link_fields = self.get_link_fields() - if link_fields is not None: - for field_info in link_fields.values(): - value = getattr(self, field_info.field_name) - if field_info.link_type in [ - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_DIRECT, - LinkTypes.BACK_DIRECT, - LinkTypes.OPTIONAL_BACK_DIRECT, - ]: - if isinstance(value, Document): - await value.delete( - link_rule=DeleteRules.DELETE_LINKS, - **pymongo_kwargs, - ) - if field_info.link_type in [ - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, - LinkTypes.BACK_LIST, - LinkTypes.OPTIONAL_BACK_LIST, - ]: - if isinstance(value, List): - await asyncio.gather( - *[ - obj.delete( - link_rule=DeleteRules.DELETE_LINKS, - **pymongo_kwargs, - ) - for obj in value - if isinstance(obj, Document) - ] - ) - - return await self.find_one({"_id": self.id}).delete( - session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ) - - @classmethod - async def delete_all( - cls, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> Optional[DeleteResult]: - """ - Delete all the documents - - :param session: Optional[AsyncClientSession] - pymongo session. - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param **pymongo_kwargs: pymongo native parameters for delete operation - :return: Optional[DeleteResult] - pymongo DeleteResult instance. 
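A sketch of the link-aware delete described above, with `House` as in the earlier hypothetical sketch:

```python
# Sketch: DELETE_LINKS first cascades into linked documents, mirroring
# the traversal in the delete() body above; DO_NOTHING skips links.
from beanie import DeleteRules


async def demolish(house: "House") -> None:
    await house.delete(link_rule=DeleteRules.DELETE_LINKS)
```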
- """ - return await cls.find_all().delete( - session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ) - - # State management - - @classmethod - def use_state_management(cls) -> bool: - """ - Is state management turned on - :return: bool - """ - return cls.get_settings().use_state_management - - @classmethod - def state_management_save_previous(cls) -> bool: - """ - Should we save the previous state after a commit to database - :return: bool - """ - return cls.get_settings().state_management_save_previous - - @classmethod - def state_management_replace_objects(cls) -> bool: - """ - Should objects be replaced when using state management - :return: bool - """ - return cls.get_settings().state_management_replace_objects - - def _save_state(self) -> None: - """ - Save current document state. Internal method - :return: None - """ - if self.use_state_management() and self.id is not None: - if self.state_management_save_previous(): - self._previous_saved_state = self._saved_state - - self._saved_state = get_dict( - self, - to_db=True, - keep_nulls=self.get_settings().keep_nulls, - exclude={"revision_id"}, - ) - - def get_saved_state(self) -> Optional[Dict[str, Any]]: - """ - Saved state getter. It is protected property. - :return: Optional[Dict[str, Any]] - saved state - """ - return self._saved_state - - def get_previous_saved_state(self) -> Optional[Dict[str, Any]]: - """ - Previous state getter. It is a protected property. - :return: Optional[Dict[str, Any]] - previous state - """ - return self._previous_saved_state - - @property - @saved_state_needed - def is_changed(self) -> bool: - if self._saved_state == get_dict( - self, - to_db=True, - keep_nulls=self.get_settings().keep_nulls, - exclude={"revision_id"}, - ): - return False - return True - - @property - @saved_state_needed - @previous_saved_state_needed - def has_changed(self) -> bool: - if ( - self._previous_saved_state is None - or self._previous_saved_state == self._saved_state - ): - return False - return True - - def _collect_updates( - self, old_dict: Dict[str, Any], new_dict: Dict[str, Any] - ) -> Dict[str, Any]: - """ - Compares old_dict with new_dict and returns field paths that have been updated - Args: - old_dict: dict1 - new_dict: dict2 - - Returns: dictionary with updates - - """ - updates = {} - if old_dict.keys() - new_dict.keys(): - updates = new_dict - else: - for field_name, field_value in new_dict.items(): - if field_value != old_dict.get(field_name): - if not self.state_management_replace_objects() and ( - isinstance(field_value, dict) - and isinstance(old_dict.get(field_name), dict) - ): - if old_dict.get(field_name) is None: - updates[field_name] = field_value - elif isinstance(field_value, dict) and isinstance( - old_dict.get(field_name), dict - ): - field_data = self._collect_updates( - old_dict.get(field_name), # type: ignore - field_value, - ) - - for k, v in field_data.items(): - updates[f"{field_name}.{k}"] = v - else: - updates[field_name] = field_value - - return updates - - @saved_state_needed - def get_changes(self) -> Dict[str, Any]: - return self._collect_updates( - self._saved_state, # type: ignore - get_dict( - self, - to_db=True, - keep_nulls=self.get_settings().keep_nulls, - exclude={"revision_id"}, - ), - ) - - @saved_state_needed - @previous_saved_state_needed - def get_previous_changes(self) -> Dict[str, Any]: - if self._previous_saved_state is None: - return {} - - return self._collect_updates( - self._previous_saved_state, - self._saved_state, # type: ignore - ) - - @saved_state_needed - 
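The state-management accessors above combine naturally with `rollback`, defined just below; an editorial sketch:

```python
# Sketch: requires use_state_management = True in Settings.
def inspect_state(product: "Product") -> None:
    product.name = "draft"
    if product.is_changed:
        print(product.get_changes())  # e.g. {"name": "draft"}
        product.rollback()            # restore the last saved state
```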
def rollback(self) -> None: - if self.is_changed: - for key, value in self._saved_state.items(): # type: ignore - if key == "_id": - setattr(self, "id", value) - else: - setattr(self, key, value) - - # Other - - @classmethod - def get_settings(cls) -> DocumentSettings: - """ - Get document settings, which was created on - the initialization step - - :return: DocumentSettings class - """ - if cls._document_settings is None: - raise CollectionWasNotInitialized - return cls._document_settings - - @classmethod - async def inspect_collection( - cls, session: Optional[AsyncClientSession] = None - ) -> InspectionResult: - """ - Check, if documents, stored in the MongoDB collection - are compatible with the Document schema - - :param session: Optional[AsyncClientSession] - pymongo session - The session instance used for transactional operations. Defaults to None. - :return: InspectionResult - """ - inspection_result = InspectionResult() - async for json_document in cls.get_pymongo_collection().find( - {}, session=session - ): - try: - parse_model(cls, json_document) - except ValidationError as e: - if inspection_result.status == InspectionStatuses.OK: - inspection_result.status = InspectionStatuses.FAIL - inspection_result.errors.append( - InspectionError( - document_id=json_document["_id"], error=str(e) - ) - ) - return inspection_result - - @classmethod - def _check_hidden_fields(cls): - hidden_fields = [ - (name, field) - for name, field in get_model_fields(cls).items() - if get_extra_field_info(field, "hidden") is True - ] - if not hidden_fields: - return - warnings.warn( - f"{cls.__name__}: 'hidden=True' is deprecated, please use 'exclude=True'", - DeprecationWarning, - stacklevel=2, - ) - if IS_PYDANTIC_V2: - for name, field in hidden_fields: - field.exclude = True - del field.json_schema_extra["hidden"] - cls.model_rebuild(force=True) - else: - for name, field in hidden_fields: - field.field_info.exclude = True - del field.field_info.extra["hidden"] - cls.__exclude_fields__[name] = True - - @wrap_with_actions(event_type=EventTypes.VALIDATE_ON_SAVE) - async def validate_self(self, *args: Any, **kwargs: Any): - # TODO: it can be sync, but needs some actions controller improvements - if self.get_settings().validate_on_save: - new_model = parse_model(self.__class__, get_model_dump(self)) - merge_models(self, new_model) - - def to_ref(self): - if self.id is None: - raise DocumentWasNotSaved("Can not create dbref without id") - return DBRef(self.get_pymongo_collection().name, self.id) - - async def fetch_link(self, field: Union[str, Any]): - ref_obj = getattr(self, field, None) - if isinstance(ref_obj, Link): - value = await ref_obj.fetch(fetch_links=True) - setattr(self, field, value) - if isinstance(ref_obj, list) and ref_obj: - values = await Link.fetch_list(ref_obj, fetch_links=True) - setattr(self, field, values) - - async def fetch_all_links(self): - coros = [] - link_fields = self.get_link_fields() - if link_fields is not None: - for ref in link_fields.values(): - coros.append(self.fetch_link(ref.field_name)) # TODO lists - await asyncio.gather(*coros) - - @classmethod - def get_link_fields(cls) -> Optional[Dict[str, LinkInfo]]: - return cls._link_fields - - @classmethod - def get_model_type(cls) -> ModelType: - return ModelType.Document - - @classmethod - async def distinct( - cls, - key: str, - filter: Optional[Mapping[str, Any]] = None, - session: Optional[AsyncClientSession] = None, - **kwargs: Any, - ) -> list: - return await cls.get_pymongo_collection().distinct( - key=key, 
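The link-fetching helpers above resolve lazily loaded references; a sketch with the hypothetical `House` model:

```python
# Sketch: Link fields stay unresolved until fetched explicitly
# (or via fetch_links=True at query time).
async def resolve(house: "House") -> None:
    await house.fetch_link("door")   # resolve a single Link field
    await house.fetch_all_links()    # resolve every Link field
```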
filter=filter, session=session, **kwargs - ) - - @classmethod - def link_from_id(cls, id: Any): - ref = DBRef(id=id, collection=cls.get_collection_name()) - return Link(ref, document_class=cls) - - @classmethod - def bulk_writer( - cls, - session: Optional[AsyncClientSession] = None, - ordered: bool = True, - bypass_document_validation: Optional[bool] = False, - comment: Optional[Any] = None, - ) -> BulkWriter: - """ - Returns a BulkWriter instance for handling bulk write operations. - - :param session: Optional[AsyncClientSession] - pymongo session. - The session instance used for transactional operations. - :param ordered: bool - If ``True`` (the default), requests will be performed on the server serially, in the order provided. If an error - occurs, all remaining operations are aborted. If ``False``, requests will be performed on the server in - arbitrary order, possibly in parallel, and all operations will be attempted. - :param bypass_document_validation: bool, optional - If ``True``, allows the write to opt-out of document-level validation. Default is ``False``. - :param comment: str, optional - A user-provided comment to attach to the BulkWriter. - - :returns: BulkWriter - An instance of BulkWriter configured with the provided settings. - - Example Usage: - -------------- - This method is typically used within an asynchronous context manager. - - .. code-block:: python - - async with Document.bulk_writer(ordered=True) as bulk: - await Document.insert_one(Document(field="value"), bulk_writer=bulk) - """ - return BulkWriter( - session, ordered, cls, bypass_document_validation, comment - ) - - -class DocumentWithSoftDelete(Document): - deleted_at: Optional[datetime] = None - - def is_deleted(self) -> bool: - return self.deleted_at is not None - - async def hard_delete( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - link_rule: DeleteRules = DeleteRules.DO_NOTHING, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - **pymongo_kwargs: Any, - ) -> Optional[DeleteResult]: - return await super().delete( - session=session, - bulk_writer=bulk_writer, - link_rule=link_rule, - skip_actions=skip_actions, - **pymongo_kwargs, - ) - - async def delete( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - link_rule: DeleteRules = DeleteRules.DO_NOTHING, - skip_actions: Optional[List[Union[ActionDirections, str]]] = None, - **pymongo_kwargs, - ) -> Optional[DeleteResult]: - self.deleted_at = datetime.now(tz=timezone.utc) - await self.save() - return None - - @classmethod - def find_many_in_all( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - return cls._find_many_query_class(document_model=cls).find_many( - *args, - sort=sort, - skip=skip, - limit=limit, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - lazy_parse=lazy_parse, - 
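The soft-delete variant above only stamps `deleted_at`; a sketch of the resulting behaviour, with a hypothetical `Order` model (assuming `DocumentWithSoftDelete` is importable from the package root, as in recent beanie releases):

```python
# Sketch: delete() marks the document; the overridden finders filter it
# out via {"deleted_at": None}, while *_in_all and hard_delete do not.
from beanie import DocumentWithSoftDelete


class Order(DocumentWithSoftDelete):
    total: float


async def soft_delete_demo(order: Order) -> None:
    await order.delete()                                   # sets deleted_at
    visible = await Order.find_many().to_list()            # excludes it
    everything = await Order.find_many_in_all().to_list()  # includes it
    await order.hard_delete()                              # removes it
```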
nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @classmethod - def find_many( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - args = cls._add_class_id_filter(args, with_children) + ( - {"deleted_at": None}, - ) - return cls._find_many_query_class(document_model=cls).find_many( - *args, - sort=sort, - skip=skip, - limit=limit, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @classmethod - def find_one( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindOne[FindType], FindOne["DocumentProjectionType"]]: - args = cls._add_class_id_filter(args, with_children) + ( - {"deleted_at": None}, - ) - return cls._find_one_query_class(document_model=cls).find_one( - *args, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/enums.py b/venv/lib/python3.12/site-packages/beanie/odm/enums.py deleted file mode 100644 index 07aa3293..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/enums.py +++ /dev/null @@ -1,21 +0,0 @@ -from enum import Enum - -import pymongo - - -class SortDirection(int, Enum): - """ - Sorting directions - """ - - ASCENDING = pymongo.ASCENDING - DESCENDING = pymongo.DESCENDING - - -class InspectionStatuses(str, Enum): - """ - Statuses of the collection inspection - """ - - FAIL = "FAIL" - OK = "OK" diff --git a/venv/lib/python3.12/site-packages/beanie/odm/fields.py b/venv/lib/python3.12/site-packages/beanie/odm/fields.py deleted file mode 100644 index 283fdca9..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/fields.py +++ /dev/null @@ -1,711 +0,0 @@ -from __future__ import annotations - -import asyncio -from collections import OrderedDict -from dataclasses import dataclass -from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generic, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) -from typing import OrderedDict as OrderedDictType - -from bson import DBRef, ObjectId -from bson.errors import InvalidId -from pydantic import BaseModel -from pymongo import ASCENDING, IndexModel -from typing_extensions import get_args - -from beanie.odm.enums import SortDirection -from beanie.odm.operators.find.comparison 
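Since `SortDirection` (above) simply mirrors pymongo's constants, sort specifications can use it directly; a hedged sketch:

```python
# Sketch: sort by name ascending using the enum shown above.
from beanie.odm.enums import SortDirection


async def list_sorted() -> None:
    products = await (
        Product.find_all()
        .sort([("name", SortDirection.ASCENDING)])
        .to_list()
    )
```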
import ( - GT, - GTE, - LT, - LTE, - NE, - Eq, - In, -) -from beanie.odm.registry import DocsRegistry -from beanie.odm.utils.parsing import parse_obj -from beanie.odm.utils.pydantic import ( - IS_PYDANTIC_V2, - get_field_type, - get_model_fields, - parse_object_as, -) - -if IS_PYDANTIC_V2: - from pydantic import ( - GetCoreSchemaHandler, - GetJsonSchemaHandler, - TypeAdapter, - ) - from pydantic.json_schema import JsonSchemaValue - from pydantic_core import core_schema - from pydantic_core.core_schema import CoreSchema, ValidationInfo -else: - from pydantic.fields import ModelField - from pydantic.json import ENCODERS_BY_TYPE - -if TYPE_CHECKING: - from beanie.odm.documents import DocType - - -@dataclass(frozen=True) -class IndexedAnnotation: - _indexed: Tuple[int, Dict[str, Any]] - - -def Indexed(typ=None, index_type=ASCENDING, **kwargs: Any): - """ - If `typ` is defined, returns a subclass of `typ` with an extra attribute - `_indexed` as a tuple: - - Index 0: `index_type` such as `pymongo.ASCENDING` - - Index 1: `kwargs` passed to `IndexModel` - When instantiated the type of the result will actually be `typ`. - - When `typ` is not defined, returns an `IndexedAnnotation` instance, to be - used as metadata in `Annotated` fields. - - Example: - ```py - # Both fields would have the same behavior - class MyModel(BaseModel): - field1: Indexed(str, unique=True) - field2: Annotated[str, Indexed(unique=True)] - ``` - """ - if typ is None: - return IndexedAnnotation(_indexed=(index_type, kwargs)) - - class NewType(typ): - _indexed = (index_type, kwargs) - - def __new__(cls, *args: Any, **kwargs: Any): - return typ.__new__(typ, *args, **kwargs) - - if IS_PYDANTIC_V2: - - @classmethod - def __get_pydantic_core_schema__( - cls, _source_type: Type[Any], _handler: GetCoreSchemaHandler - ) -> CoreSchema: - custom_type = getattr( - typ, "__get_pydantic_core_schema__", None - ) - if custom_type is not None: - return custom_type(_source_type, _handler) - - return core_schema.no_info_after_validator_function( - lambda v: v, core_schema.simple_ser_schema(typ.__name__) - ) - - NewType.__name__ = f"Indexed {typ.__name__}" - return NewType - - -class PydanticObjectId(ObjectId): - """ - Object Id field. Compatible with Pydantic. 
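A sketch of how `PydanticObjectId` validates inside a pydantic model (see `_validate` just below); `Ref` is a hypothetical model:

```python
# Sketch: a 24-character hex string parses; anything else fails validation
# with "Id must be of type PydanticObjectId".
from pydantic import BaseModel

from beanie import PydanticObjectId


class Ref(BaseModel):
    item_id: PydanticObjectId


Ref(item_id="5eb7cf5a86d9755df3a6c593")  # ok
# Ref(item_id="nope")                    # raises a ValidationError
```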
- """ - - @classmethod - def _validate(cls, v): - if isinstance(v, bytes): - v = v.decode("utf-8") - try: - return PydanticObjectId(v) - except (InvalidId, TypeError): - raise ValueError("Id must be of type PydanticObjectId") - - if IS_PYDANTIC_V2: - - @classmethod - def __get_pydantic_core_schema__( - cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ) -> CoreSchema: - definition = core_schema.definition_reference_schema( - "PydanticObjectId" - ) # used for deduplication - - return core_schema.definitions_schema( - definition, - [ - core_schema.json_or_python_schema( - python_schema=core_schema.no_info_plain_validator_function( - cls._validate - ), - json_schema=core_schema.no_info_after_validator_function( - cls._validate, - core_schema.str_schema( - pattern="^[0-9a-f]{24}$", - min_length=24, - max_length=24, - ), - ), - serialization=core_schema.plain_serializer_function_ser_schema( - lambda instance: str(instance), when_used="json" - ), - ref=definition["schema_ref"], - ) - ], - ) - - @classmethod - def __get_pydantic_json_schema__( - cls, - schema: core_schema.CoreSchema, - handler: GetJsonSchemaHandler, # type: ignore - ) -> JsonSchemaValue: - """ - Results such schema: - ```json - { - "components": { - "schemas": { - "Item": { - "properties": { - "id": { - "$ref": "#/components/schemas/PydanticObjectId" - } - }, - "type": "object", - "title": "Item" - }, - "PydanticObjectId": { - "type": "string", - "maxLength": 24, - "minLength": 24, - "pattern": "^[0-9a-f]{24}$", - "example": "5eb7cf5a86d9755df3a6c593" - } - } - } - } - ``` - """ - - json_schema = handler(schema) - schema_to_update = handler.resolve_ref_schema(json_schema) - schema_to_update.update(example="5eb7cf5a86d9755df3a6c593") - return json_schema - - else: - - @classmethod - def __get_validators__(cls): - yield cls._validate - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]): - field_schema.update( - type="string", - example="5eb7cf5a86d9755df3a6c593", - ) - - -if not IS_PYDANTIC_V2: - ENCODERS_BY_TYPE[PydanticObjectId] = ( - str # it is a workaround to force pydantic make json schema for this field - ) - -BeanieObjectId = PydanticObjectId - - -class ExpressionField(str): - def __getitem__(self, item): - """ - Get sub field - - :param item: name of the subfield - :return: ExpressionField - """ - return ExpressionField(f"{self}.{item}") - - def __getattr__(self, item): - """ - Get sub field - - :param item: name of the subfield - :return: ExpressionField - """ - return ExpressionField(f"{self}.{item}") - - def __hash__(self): - return hash(str(self)) - - def __eq__(self, other): - if isinstance(other, ExpressionField): - return super(ExpressionField, self).__eq__(other) - return Eq(field=self, other=other) - - def __gt__(self, other): - return GT(field=self, other=other) - - def __ge__(self, other): - return GTE(field=self, other=other) - - def __lt__(self, other): - return LT(field=self, other=other) - - def __le__(self, other): - return LTE(field=self, other=other) - - def __ne__(self, other): - return NE(field=self, other=other) - - def __pos__(self): - return self, SortDirection.ASCENDING - - def __neg__(self): - return self, SortDirection.DESCENDING - - def __copy__(self): - return self - - def __deepcopy__(self, memo): - return self - - -class DeleteRules(str, Enum): - DO_NOTHING = "DO_NOTHING" - DELETE_LINKS = "DELETE_LINKS" - - -class WriteRules(str, Enum): - DO_NOTHING = "DO_NOTHING" - WRITE = "WRITE" - - -class LinkTypes(str, Enum): - DIRECT = "DIRECT" - OPTIONAL_DIRECT = 
"OPTIONAL_DIRECT" - LIST = "LIST" - OPTIONAL_LIST = "OPTIONAL_LIST" - - BACK_DIRECT = "BACK_DIRECT" - BACK_LIST = "BACK_LIST" - OPTIONAL_BACK_DIRECT = "OPTIONAL_BACK_DIRECT" - OPTIONAL_BACK_LIST = "OPTIONAL_BACK_LIST" - - -class LinkInfo(BaseModel): - field_name: str - lookup_field_name: str - document_class: Type[BaseModel] # Document class - link_type: LinkTypes - nested_links: Optional[Dict] = None - is_fetchable: bool = True - - -T = TypeVar("T") - - -class Link(Generic[T]): - def __init__(self, ref: DBRef, document_class: Type[T]): - self.ref = ref - self.document_class = document_class - - async def fetch(self, fetch_links: bool = False) -> Union[T, Link[T]]: - result = await self.document_class.get( # type: ignore - self.ref.id, with_children=True, fetch_links=fetch_links - ) - return result or self - - @classmethod - async def fetch_one(cls, link: Link[T]): - return await link.fetch() - - @classmethod - async def fetch_list( - cls, - links: List[Union[Link[T], DocType]], - fetch_links: bool = False, - ): - """ - Fetch list that contains links and documents - :param links: - :param fetch_links: - :return: - """ - data = Link.repack_links(links) # type: ignore - ids_to_fetch = [] - document_class = None - for doc_id, link in data.items(): - if isinstance(link, Link): - if document_class is None: - document_class = link.document_class - else: - if document_class != link.document_class: - raise ValueError( - "All the links must have the same model class" - ) - ids_to_fetch.append(link.ref.id) - - if ids_to_fetch: - fetched_models = await document_class.find( # type: ignore - In("_id", ids_to_fetch), - with_children=True, - fetch_links=fetch_links, - ).to_list() - - for model in fetched_models: - data[model.id] = model - - return list(data.values()) - - @staticmethod - def repack_links( - links: List[Union[Link[T], DocType]], - ) -> OrderedDictType[Any, Any]: - result = OrderedDict() - for link in links: - if isinstance(link, Link): - result[link.ref.id] = link - else: - result[link.id] = link - return result - - @classmethod - async def fetch_many(cls, links: List[Link[T]]) -> List[Union[T, Link[T]]]: - coros = [] - for link in links: - coros.append(link.fetch()) - return await asyncio.gather(*coros) - - if IS_PYDANTIC_V2: - - @staticmethod - def serialize(value: Union[Link[T], BaseModel]): - if isinstance(value, Link): - return value.to_dict() - return value.model_dump(mode="json") - - @classmethod - def wrapped_validate( - cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ): - def validate( - v: Union[Link[T], T, DBRef, dict[str, Any]], - validation_info: ValidationInfo, - ) -> Link[T] | T: - document_class = DocsRegistry.evaluate_fr( # type: ignore - get_args(source_type)[0] - ) - - if isinstance(v, DBRef): - return cls(ref=v, document_class=document_class) - if isinstance(v, Link): - return v - if isinstance(v, dict) and v.keys() == {"id", "collection"}: - return cls( - ref=DBRef( - collection=v["collection"], - id=TypeAdapter( - document_class.model_fields["id"].annotation - ).validate_python(v["id"]), - ), - document_class=document_class, - ) - if isinstance(v, dict) or isinstance(v, BaseModel): - return parse_obj(document_class, v) - - # Default fallback case for unknown type - new_id = TypeAdapter( - document_class.model_fields["id"].annotation - ).validate_python(v) - ref = DBRef( - collection=document_class.get_collection_name(), id=new_id - ) - return cls(ref=ref, document_class=document_class) - - return validate - - @classmethod - def 
__get_pydantic_core_schema__( - cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ) -> CoreSchema: - return core_schema.json_or_python_schema( - python_schema=core_schema.with_info_plain_validator_function( - cls.wrapped_validate(source_type, handler) - ), - json_schema=core_schema.union_schema( - [ - core_schema.typed_dict_schema( - { - "id": core_schema.typed_dict_field( - core_schema.str_schema() - ), - "collection": core_schema.typed_dict_field( - core_schema.str_schema() - ), - } - ), - core_schema.dict_schema( - keys_schema=core_schema.str_schema(), - values_schema=core_schema.any_schema(), - ), - ] - ), - serialization=core_schema.plain_serializer_function_ser_schema( - function=lambda instance: cls.serialize(instance), - when_used="json-unless-none", - ), - ) - - else: - - @classmethod - def __get_validators__(cls): - yield cls._validate - - @classmethod - def _validate( - cls, - v: Union[Link[T], T, DBRef, dict[str, Any]], - field: ModelField, - ) -> Link[T] | T: - document_class = DocsRegistry.evaluate_fr( # type: ignore - field.sub_fields[0].type_ - ) - - if isinstance(v, DBRef): - return cls(ref=v, document_class=document_class) - if isinstance(v, Link): - return v - if isinstance(v, dict) or isinstance(v, BaseModel): - return parse_obj(document_class, v) - - # Default fallback case for unknown type - new_id = parse_object_as( - get_field_type(get_model_fields(document_class)["id"]), v - ) - ref = DBRef( - collection=document_class.get_collection_name(), id=new_id - ) - return cls(ref=ref, document_class=document_class) - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]): - field_schema.clear() - field_schema.update( - { - "anyOf": [ - { - "properties": { - "id": {"type": "string", "title": "Id"}, - "collection": { - "type": "string", - "title": "Collection", - }, - }, - "type": "object", - "required": ["id", "collection"], - }, - {"type": "object"}, - ], - } - ) - - def to_ref(self): - return self.ref - - def to_dict(self): - return {"id": str(self.ref.id), "collection": self.ref.collection} - - -if not IS_PYDANTIC_V2: - ENCODERS_BY_TYPE[Link] = lambda o: o.to_dict() - - -class BackLink(Generic[T]): - """Back reference to a document""" - - def __init__(self, document_class: Type[T]): - self.document_class = document_class - - if IS_PYDANTIC_V2: - - @classmethod - def wrapped_validate( - cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ): - def validate( - v: Union[T, dict[str, Any]], validation_info: ValidationInfo - ) -> BackLink[T] | T: - document_class = DocsRegistry.evaluate_fr( # type: ignore - get_args(source_type)[0] - ) - if isinstance(v, dict) or isinstance(v, BaseModel): - return parse_obj(document_class, v) - return cls(document_class=document_class) - - return validate - - @classmethod - def __get_pydantic_core_schema__( - cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ) -> CoreSchema: - # NOTE: BackLinks are only virtual fields, they shouldn't be serialized nor appear in the schema. 
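As the note above says, back links are virtual fields; forward `Link` fields, by contrast, stay unresolved until fetched. A hedged sketch with the hypothetical `House` model:

```python
# Sketch: an unresolved Link can be fetched on demand; fetch() returns
# the target document, or the Link itself if the target no longer exists.
from beanie import Link


async def open_door(house: "House") -> None:
    door = house.door              # may still be a Link[Door]
    if isinstance(door, Link):
        door = await door.fetch()
```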
- return core_schema.json_or_python_schema( - python_schema=core_schema.with_info_plain_validator_function( - cls.wrapped_validate(source_type, handler) - ), - json_schema=core_schema.dict_schema( - keys_schema=core_schema.str_schema(), - values_schema=core_schema.any_schema(), - ), - serialization=core_schema.plain_serializer_function_ser_schema( - lambda instance: cls.to_dict(instance), - return_schema=core_schema.dict_schema(), - when_used="json-unless-none", - ), - ) - - else: - - @classmethod - def __get_validators__(cls): - yield cls._validate - - @classmethod - def _validate( - cls, v: Union[T, dict[str, Any]], field: ModelField - ) -> BackLink[T] | T: - document_class = DocsRegistry.evaluate_fr( # type: ignore - field.sub_fields[0].type_ - ) - if isinstance(v, dict) or isinstance(v, BaseModel): - return parse_obj(document_class, v) - return cls(document_class=document_class) - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]): - field_schema.clear() - field_schema.update( - { - "anyOf": [ - { - "properties": { - "id": {"type": "string", "title": "Id"}, - "collection": { - "type": "string", - "title": "Collection", - }, - }, - "type": "object", - "required": ["id", "collection"], - }, - {"type": "object"}, - ], - } - ) - - def to_dict(self) -> dict[str, str]: - document_class = DocsRegistry.evaluate_fr(self.document_class) # type: ignore - return {"collection": document_class.get_collection_name()} - - -if not IS_PYDANTIC_V2: - ENCODERS_BY_TYPE[BackLink] = lambda o: o.to_dict() - - -class IndexModelField: - def __init__(self, index: IndexModel): - self.index = index - self.name = index.document["name"] - - self.fields = tuple(sorted(self.index.document["key"])) - self.options = tuple( - sorted( - (k, v) - for k, v in self.index.document.items() - if k not in ["key", "v"] - ) - ) - - def __eq__(self, other): - return self.fields == other.fields and self.options == other.options - - def __repr__(self): - return f"IndexModelField({self.name}, {self.fields}, {self.options})" - - @staticmethod - def list_difference( - left: List[IndexModelField], right: List[IndexModelField] - ): - result = [] - for index in left: - if index not in right: - result.append(index) - return result - - @staticmethod - def list_to_index_model(left: List[IndexModelField]): - return [index.index for index in left] - - @classmethod - def from_pymongo_index_information(cls, index_info: dict): - result = [] - for name, details in index_info.items(): - fields = details["key"] - if ("_id", 1) in fields: - continue - - options = {k: v for k, v in details.items() if k != "key"} - index_model = IndexModelField( - IndexModel(fields, name=name, **options) - ) - result.append(index_model) - return result - - def same_fields(self, other: IndexModelField): - return self.fields == other.fields - - @staticmethod - def find_index_with_the_same_fields( - indexes: List[IndexModelField], index: IndexModelField - ): - for i in indexes: - if i.same_fields(index): - return i - return None - - @staticmethod - def merge_indexes( - left: List[IndexModelField], right: List[IndexModelField] - ): - left_dict = {index.fields: index for index in left} - right_dict = {index.fields: index for index in right} - left_dict.update(right_dict) - return list(left_dict.values()) - - @classmethod - def _validate(cls, v: Any) -> "IndexModelField": - if isinstance(v, IndexModel): - return IndexModelField(v) - else: - return IndexModelField(IndexModel(v)) - - if IS_PYDANTIC_V2: - - @classmethod - def __get_pydantic_core_schema__( 
- cls, source_type: Type[Any], handler: GetCoreSchemaHandler - ) -> CoreSchema: - return core_schema.no_info_plain_validator_function(cls._validate) - - else: - - @classmethod - def __get_validators__(cls): - yield cls._validate diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index fb9cea84..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregate.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregate.cpython-312.pyc deleted file mode 100644 index af7d1b63..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregate.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregation_methods.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregation_methods.cpython-312.pyc deleted file mode 100644 index b36410d8..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/aggregation_methods.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/clone.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/clone.cpython-312.pyc deleted file mode 100644 index dcf29780..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/clone.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/detector.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/detector.cpython-312.pyc deleted file mode 100644 index 78284d97..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/detector.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/find.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/find.cpython-312.pyc deleted file mode 100644 index 1a018e20..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/find.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/getters.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/getters.cpython-312.pyc deleted file mode 100644 index bd944d39..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/getters.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/inheritance.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/inheritance.cpython-312.pyc deleted file mode 100644 index 0288df62..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/inheritance.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/session.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/session.cpython-312.pyc deleted file mode 100644 index c22acb21..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/session.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/setters.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/setters.cpython-312.pyc deleted file mode 100644 index 3e586fc5..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/setters.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/update.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/update.cpython-312.pyc deleted file mode 100644 index 25818d38..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/__pycache__/update.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregate.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregate.py deleted file mode 100644 index 6ee503f8..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregate.py +++ /dev/null @@ -1,70 +0,0 @@ -from abc import abstractmethod -from typing import Any, Dict, Optional, Type, TypeVar, Union, overload - -from pydantic import BaseModel -from pymongo.asynchronous.client_session import AsyncClientSession - -from beanie.odm.queries.aggregation import AggregationQuery -from beanie.odm.queries.find import FindMany - -DocType = TypeVar("DocType", bound="AggregateInterface") -DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) - - -class AggregateInterface: - @classmethod - @abstractmethod - def find_all(cls) -> FindMany: - pass - - @overload - @classmethod - def aggregate( - cls: Type[DocType], - aggregation_pipeline: list, - projection_model: None = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> AggregationQuery[Dict[str, Any]]: ... - - @overload - @classmethod - def aggregate( - cls: Type[DocType], - aggregation_pipeline: list, - projection_model: Type[DocumentProjectionType], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> AggregationQuery[DocumentProjectionType]: ... - - @classmethod - def aggregate( - cls: Type[DocType], - aggregation_pipeline: list, - projection_model: Optional[Type[DocumentProjectionType]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> Union[ - AggregationQuery[Dict[str, Any]], - AggregationQuery[DocumentProjectionType], - ]: - """ - Aggregate over collection. - Returns [AggregationQuery](query.md#aggregationquery) query object - :param aggregation_pipeline: list - aggregation pipeline - :param projection_model: Type[BaseModel] - :param session: Optional[AsyncClientSession] - pymongo session. 
- :param ignore_cache: bool - :param **pymongo_kwargs: pymongo native parameters for aggregate operation - :return: [AggregationQuery](query.md#aggregationquery) - """ - return cls.find_all().aggregate( - aggregation_pipeline=aggregation_pipeline, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - **pymongo_kwargs, - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregation_methods.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregation_methods.py deleted file mode 100644 index fe9e6705..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/aggregation_methods.py +++ /dev/null @@ -1,189 +0,0 @@ -from abc import abstractmethod -from typing import Any, Dict, List, Optional, Union, cast - -from pymongo.asynchronous.client_session import AsyncClientSession - -from beanie.odm.fields import ExpressionField - - -class AggregateMethods: - """ - Aggregate methods - """ - - @abstractmethod - def aggregate( - self, - aggregation_pipeline, - projection_model=None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - ): ... - - async def sum( - self, - field: Union[str, ExpressionField], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - ) -> Optional[float]: - """ - Sum of values of the given field - - Example: - - ```python - - class Sample(Document): - price: int - count: int - - sum_count = await Document.find(Sample.price <= 100).sum(Sample.count) - - ``` - - :param field: Union[str, ExpressionField] - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - :return: float - sum. None if there are no items. - """ - pipeline = [ - {"$group": {"_id": None, "sum": {"$sum": f"${field}"}}}, - {"$project": {"_id": 0, "sum": 1}}, - ] - - # As we did not supply a projection we can safely cast the type (hinting to mypy that we know the type) - result: List[Dict[str, Any]] = cast( - List[Dict[str, Any]], - await self.aggregate( - aggregation_pipeline=pipeline, - session=session, - ignore_cache=ignore_cache, - ).to_list(), # type: ignore # TODO: pyright issue, fix - ) - if not result: - return None - return result[0]["sum"] - - async def avg( - self, - field, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - ) -> Optional[float]: - """ - Average of values of the given field - - Example: - - ```python - - class Sample(Document): - price: int - count: int - - avg_count = await Document.find(Sample.price <= 100).avg(Sample.count) - ``` - - :param field: Union[str, ExpressionField] - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - :return: Optional[float] - avg. None if there are no items. 
- """ - pipeline = [ - {"$group": {"_id": None, "avg": {"$avg": f"${field}"}}}, - {"$project": {"_id": 0, "avg": 1}}, - ] - - result: List[Dict[str, Any]] = cast( - List[Dict[str, Any]], - await self.aggregate( - aggregation_pipeline=pipeline, - session=session, - ignore_cache=ignore_cache, - ).to_list(), # type: ignore # TODO: pyright issue, fix - ) - if not result: - return None - return result[0]["avg"] - - async def max( - self, - field: Union[str, ExpressionField], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - ) -> Optional[float]: - """ - Max of the values of the given field - - Example: - - ```python - - class Sample(Document): - price: int - count: int - - max_count = await Document.find(Sample.price <= 100).max(Sample.count) - ``` - - :param field: Union[str, ExpressionField] - :param session: Optional[AsyncClientSession] - pymongo session - :return: float - max. None if there are no items. - """ - pipeline = [ - {"$group": {"_id": None, "max": {"$max": f"${field}"}}}, - {"$project": {"_id": 0, "max": 1}}, - ] - - result: List[Dict[str, Any]] = cast( - List[Dict[str, Any]], - await self.aggregate( - aggregation_pipeline=pipeline, - session=session, - ignore_cache=ignore_cache, - ).to_list(), # type: ignore # TODO: pyright issue, fix - ) - if not result: - return None - return result[0]["max"] - - async def min( - self, - field: Union[str, ExpressionField], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - ) -> Optional[float]: - """ - Min of the values of the given field - - Example: - - ```python - - class Sample(Document): - price: int - count: int - - min_count = await Document.find(Sample.price <= 100).min(Sample.count) - ``` - - :param field: Union[str, ExpressionField] - :param session: Optional[AsyncClientSession] - pymongo session - :return: float - min. None if there are no items. 
- """ - pipeline = [ - {"$group": {"_id": None, "min": {"$min": f"${field}"}}}, - {"$project": {"_id": 0, "min": 1}}, - ] - - result: List[Dict[str, Any]] = cast( - List[Dict[str, Any]], - await self.aggregate( - aggregation_pipeline=pipeline, - session=session, - ignore_cache=ignore_cache, - ).to_list(), # type: ignore # TODO: pyright issue, fix - ) - if not result: - return None - return result[0]["min"] diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/clone.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/clone.py deleted file mode 100644 index 9ff374d7..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/clone.py +++ /dev/null @@ -1,6 +0,0 @@ -from copy import deepcopy - - -class CloneInterface: - def clone(self): - return deepcopy(self) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/detector.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/detector.py deleted file mode 100644 index f04abf6e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/detector.py +++ /dev/null @@ -1,13 +0,0 @@ -from enum import Enum - - -class ModelType(str, Enum): - Document = "Document" - View = "View" - UnionDoc = "UnionDoc" - - -class DetectionInterface: - @classmethod - def get_model_type(cls) -> ModelType: - return ModelType.Document diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/find.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/find.py deleted file mode 100644 index 516ba796..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/find.py +++ /dev/null @@ -1,469 +0,0 @@ -from abc import abstractmethod -from collections.abc import Iterable -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Dict, - List, - Mapping, - Optional, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -from pydantic import ( - BaseModel, -) -from pymongo.asynchronous.client_session import AsyncClientSession - -from beanie.odm.enums import SortDirection -from beanie.odm.interfaces.detector import ModelType -from beanie.odm.queries.find import FindMany, FindOne -from beanie.odm.settings.base import ItemSettings - -if TYPE_CHECKING: - from beanie.odm.documents import Document - from beanie.odm.union_doc import UnionDoc - from beanie.odm.views import View - -DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) -FindType = TypeVar("FindType", bound=Union["Document", "UnionDoc", "View"]) - - -class FindInterface: - # Customization - # Query builders could be replaced in the inherited classes - _find_one_query_class: ClassVar[Type] = FindOne - _find_many_query_class: ClassVar[Type] = FindMany - - _inheritance_inited: bool = False - _class_id: ClassVar[Optional[str]] - _children: ClassVar[Dict[str, Type]] - - @classmethod - @abstractmethod - def get_model_type(cls) -> ModelType: - pass - - @classmethod - @abstractmethod - def get_settings(cls) -> ItemSettings: - pass - - @overload - @classmethod - def find_one( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindOne[FindType]: ... 
- - @overload - @classmethod - def find_one( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Type["DocumentProjectionType"], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindOne["DocumentProjectionType"]: ... - - @classmethod - def find_one( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindOne[FindType], FindOne["DocumentProjectionType"]]: - """ - Find one document by criteria. - Returns [FindOne](query.md#findone) query object. - When awaited this will either return a document or None if no document exists for the search criteria. - - :param args: *Mapping[Any, Any] - search criteria - :param projection_model: Optional[Type[BaseModel]] - projection model - :param session: Optional[AsyncClientSession] - pymongo session. - :param ignore_cache: bool - :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) - :return: [FindOne](query.md#findone) - find query instance - """ - args = cls._add_class_id_filter(args, with_children) - return cls._find_one_query_class(document_model=cls).find_one( - *args, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @overload - @classmethod - def find_many( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany[FindType]: ... - - @overload - @classmethod - def find_many( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany["DocumentProjectionType"]: ... 
- - @classmethod - def find_many( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - """ - Find many documents by criteria. - Returns [FindMany](query.md#findmany) query object - - :param args: *Mapping[Any, Any] - search criteria - :param skip: Optional[int] - The number of documents to omit. - :param limit: Optional[int] - The maximum number of results to return. - :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. - :param projection_model: Optional[Type[BaseModel]] - projection model - :param session: Optional[AsyncClientSession] - pymongo session. - :param ignore_cache: bool - :param lazy_parse: bool - :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) - :return: [FindMany](query.md#findmany) - query instance - """ - args = cls._add_class_id_filter(args, with_children) - return cls._find_many_query_class(document_model=cls).find_many( - *args, - sort=sort, - skip=skip, - limit=limit, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @overload - @classmethod - def find( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany[FindType]: ... - - @overload - @classmethod - def find( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Type["DocumentProjectionType"], - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany["DocumentProjectionType"]: ... 
- - @classmethod - def find( # type: ignore - cls: Type[FindType], - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - """ - The same as find_many - """ - return cls.find_many( - *args, - skip=skip, - limit=limit, - sort=sort, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - with_children=with_children, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @overload - @classmethod - def find_all( # type: ignore - cls: Type[FindType], - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - projection_model: None = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany[FindType]: ... - - @overload - @classmethod - def find_all( # type: ignore - cls: Type[FindType], - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - projection_model: Optional[Type["DocumentProjectionType"]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany["DocumentProjectionType"]: ... - - @classmethod - def find_all( # type: ignore - cls: Type[FindType], - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - projection_model: Optional[Type["DocumentProjectionType"]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - """ - Get all the documents - - :param skip: Optional[int] - The number of documents to omit. - :param limit: Optional[int] - The maximum number of results to return. - :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. - :param projection_model: Optional[Type[BaseModel]] - projection model - :param session: Optional[AsyncClientSession] - pymongo session. 
- :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) - :return: [FindMany](query.md#findmany) - query instance - """ - return cls.find_many( - {}, - skip=skip, - limit=limit, - sort=sort, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - with_children=with_children, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @overload - @classmethod - def all( # type: ignore - cls: Type[FindType], - projection_model: None = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany[FindType]: ... - - @overload - @classmethod - def all( # type: ignore - cls: Type[FindType], - projection_model: Type["DocumentProjectionType"], - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> FindMany["DocumentProjectionType"]: ... - - @classmethod - def all( # type: ignore - cls: Type[FindType], - projection_model: Optional[Type["DocumentProjectionType"]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - with_children: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: - """ - the same as find_all - """ - return cls.find_all( - skip=skip, - limit=limit, - sort=sort, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - with_children=with_children, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - @classmethod - async def count(cls) -> int: - """ - Number of documents in the collections - The same as find_all().count() - - :return: int - """ - return await cls.find_all().count() # type: ignore - - @classmethod - def _add_class_id_filter(cls, args: Tuple, with_children: bool = False): - # skip if _class_id is already added - if any( - ( - True - for a in args - if isinstance(a, Iterable) and cls.get_settings().class_id in a - ) - ): - return args - - if ( - cls.get_model_type() == ModelType.Document - and cls._inheritance_inited - ): - if not with_children: - args += ({cls.get_settings().class_id: cls._class_id},) - else: - args += ( - { - cls.get_settings().class_id: { - "$in": [cls._class_id] - + [cname for cname in cls._children.keys()] - } - }, - ) - - if cls.get_settings().union_doc: - args += ( - { - cls.get_settings().class_id: cls.get_settings().union_doc_alias - }, - ) - return args diff --git 
a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/getters.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/getters.py deleted file mode 100644 index fd9a9a19..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/getters.py +++ /dev/null @@ -1,28 +0,0 @@ -from abc import abstractmethod - -from pymongo.asynchronous.collection import AsyncCollection - -from beanie.odm.settings.base import ItemSettings - - -class OtherGettersInterface: - @classmethod - @abstractmethod - def get_settings(cls) -> ItemSettings: - pass - - @classmethod - def get_pymongo_collection(cls) -> AsyncCollection: - return cls.get_settings().pymongo_collection - - @classmethod - def get_collection_name(cls) -> str: - return cls.get_settings().name # type: ignore - - @classmethod - def get_bson_encoders(cls): - return cls.get_settings().bson_encoders - - @classmethod - def get_link_fields(cls): - return None diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/inheritance.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/inheritance.py deleted file mode 100644 index 60e4d672..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/inheritance.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import ( - ClassVar, - Dict, - Optional, - Type, -) - - -class InheritanceInterface: - _children: ClassVar[Dict[str, Type]] - _parent: ClassVar[Optional[Type]] - _inheritance_inited: ClassVar[bool] - _class_id: ClassVar[Optional[str]] = None - - @classmethod - def add_child(cls, name: str, clas: Type): - cls._children[name] = clas - if cls._parent is not None: - cls._parent.add_child(name, clas) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/session.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/session.py deleted file mode 100644 index e751d05a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/session.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Optional - -from pymongo.asynchronous.client_session import AsyncClientSession - - -class SessionMethods: - """ - Session methods - """ - - def set_session(self, session: Optional[AsyncClientSession] = None): - """ - Set session - :param session: Optional[AsyncClientSession] - pymongo session - :return: - """ - if session is not None: - self.session: Optional[AsyncClientSession] = session - return self diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/setters.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/setters.py deleted file mode 100644 index 0cc2be9a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/setters.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import ClassVar, Optional - -from beanie.odm.settings.document import DocumentSettings - - -class SettersInterface: - _document_settings: ClassVar[Optional[DocumentSettings]] - - @classmethod - def set_collection(cls, collection): - """ - Collection setter - """ - cls._document_settings.pymongo_collection = collection - - @classmethod - def set_database(cls, database): - """ - Database setter - """ - cls._document_settings.pymongo_db = database - - @classmethod - def set_collection_name(cls, name: str): - """ - Collection name setter - """ - cls._document_settings.name = name # type: ignore diff --git a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/update.py b/venv/lib/python3.12/site-packages/beanie/odm/interfaces/update.py deleted file mode 100644 index fb230d3d..00000000 --- 
a/venv/lib/python3.12/site-packages/beanie/odm/interfaces/update.py +++ /dev/null @@ -1,118 +0,0 @@ -from abc import abstractmethod -from datetime import datetime -from typing import Any, Dict, Mapping, Optional, Union - -from pymongo.asynchronous.client_session import AsyncClientSession - -from beanie.odm.bulk import BulkWriter -from beanie.odm.fields import ExpressionField -from beanie.odm.operators.update.general import ( - CurrentDate, - Inc, - Set, -) - - -class UpdateMethods: - """ - Update methods - """ - - @abstractmethod - def update( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **kwargs: Any, - ): - return self - - def set( - self, - expression: Dict[Union[ExpressionField, str, Any], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **kwargs: Any, - ): - """ - Set values - - Example: - - ```python - - class Sample(Document): - one: int - - await Document.find(Sample.one == 1).set({Sample.one: 100}) - - ``` - - Uses [Set operator](operators/update.md#set) - - :param expression: Dict[Union[ExpressionField, str, Any], Any] - keys and - values to set - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :return: self - """ - return self.update( - Set(expression), session=session, bulk_writer=bulk_writer, **kwargs - ) - - def current_date( - self, - expression: Dict[Union[datetime, ExpressionField, str], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **kwargs: Any, - ): - """ - Set current date - - Uses [CurrentDate operator](operators/update.md#currentdate) - - :param expression: Dict[Union[datetime, ExpressionField, str], Any] - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :return: self - """ - return self.update( - CurrentDate(expression), - session=session, - bulk_writer=bulk_writer, - **kwargs, - ) - - def inc( - self, - expression: Dict[Union[ExpressionField, float, int, str], Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **kwargs: Any, - ): - """ - Increment - - Example: - - ```python - - class Sample(Document): - one: int - - await Document.find(Sample.one == 1).inc({Sample.one: 100}) - - ``` - - Uses [Inc operator](operators/update.md#inc) - - :param expression: Dict[Union[ExpressionField, float, int, str], Any] - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - bulk writer - :return: self - """ - return self.update( - Inc(expression), session=session, bulk_writer=bulk_writer, **kwargs - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/models.py b/venv/lib/python3.12/site-packages/beanie/odm/models.py deleted file mode 100644 index 6515e08e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/models.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import List - -from pydantic import BaseModel - -from beanie.odm.enums import InspectionStatuses -from beanie.odm.fields import PydanticObjectId - - -class InspectionError(BaseModel): - """ - Inspection error details - """ - - document_id: PydanticObjectId - error: str - - -class InspectionResult(BaseModel): - """ - Collection inspection result - """ - - status: InspectionStatuses = InspectionStatuses.OK - errors: List[InspectionError] = [] diff --git 
a/venv/lib/python3.12/site-packages/beanie/odm/operators/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/__init__.py deleted file mode 100644 index 5145f8a3..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from abc import abstractmethod -from collections.abc import Mapping -from copy import copy, deepcopy -from typing import Any, Dict -from typing import Mapping as MappingType - - -class BaseOperator(Mapping): - """ - Base operator. - """ - - @property - @abstractmethod - def query(self) -> MappingType[str, Any]: ... - - def __getitem__(self, item: str): - return self.query[item] - - def __iter__(self): - return iter(self.query) - - def __len__(self): - return len(self.query) - - def __repr__(self): - return repr(self.query) - - def __str__(self): - return str(self.query) - - def __copy__(self): - return copy(self.query) - - def __deepcopy__(self, memodict: Dict[str, Any] = {}): - return deepcopy(self.query) - - def copy(self): - return copy(self) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 65cc9593..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__init__.py deleted file mode 100644 index 281cc7f4..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from abc import ABC - -from beanie.odm.operators import BaseOperator - - -class BaseFindOperator(BaseOperator, ABC): ... 
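The `BaseOperator` removed above implements the `Mapping` protocol, which is what lets beanie operator instances be passed anywhere PyMongo expects a plain filter document. A minimal self-contained sketch of that pattern (the `FieldEq` operator below is illustrative only, not part of beanie):

```python
from abc import abstractmethod
from collections.abc import Mapping
from typing import Any


class BaseOperator(Mapping):
    """Expose a MongoDB query document through the Mapping protocol."""

    @property
    @abstractmethod
    def query(self) -> Mapping[str, Any]: ...

    def __getitem__(self, item: str):
        return self.query[item]

    def __iter__(self):
        return iter(self.query)

    def __len__(self):
        return len(self.query)


class FieldEq(BaseOperator):
    """Illustrative only: equality on a single field."""

    def __init__(self, field: str, value: Any) -> None:
        self.field = field
        self.value = value

    @property
    def query(self) -> Mapping[str, Any]:
        return {self.field: self.value}


# PyMongo accepts any Mapping as a filter, so the operator can be used directly.
assert dict(FieldEq("price", 2)) == {"price": 2}
```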
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index abfcce9c..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/array.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/array.cpython-312.pyc deleted file mode 100644 index 556771a6..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/array.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/bitwise.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/bitwise.cpython-312.pyc deleted file mode 100644 index 9025ff75..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/bitwise.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/comparison.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/comparison.cpython-312.pyc deleted file mode 100644 index b36b83c7..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/comparison.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/element.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/element.cpython-312.pyc deleted file mode 100644 index 7e5c7a3f..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/element.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/evaluation.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/evaluation.cpython-312.pyc deleted file mode 100644 index 69617700..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/evaluation.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/geospatial.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/geospatial.cpython-312.pyc deleted file mode 100644 index feb70d4c..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/geospatial.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/logical.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/logical.cpython-312.pyc deleted file mode 100644 index 0a6d88cf..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/__pycache__/logical.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/array.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/array.py deleted file mode 100644 index dd924ec5..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/array.py +++ /dev/null @@ -1,120 +0,0 @@ -from abc import ABC -from typing import Any, Optional - -from beanie.odm.operators.find import 
BaseFindOperator - - -class BaseFindArrayOperator(BaseFindOperator, ABC): ... - - -class All(BaseFindArrayOperator): - """ - `$all` array query operator - - Example: - - ```python - class Sample(Document): - results: List[int] - - All(Sample.results, [80, 85]) - ``` - - Will return query object like - - ```python - {"results": {"$all": [80, 85]}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/all/ - """ - - def __init__( - self, - field, - values: list, - ): - self.field = field - self.values_list = values - - @property - def query(self): - return {self.field: {"$all": self.values_list}} - - -class ElemMatch(BaseFindArrayOperator): - """ - `$elemMatch` array query operator - - Example: - - ```python - class Sample(Document): - results: List[int] - - ElemMatch(Sample.results, {"$in": [80, 85]}) - ``` - - Will return query object like - - ```python - {"results": {"$elemMatch": {"$in": [80, 85]}}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/elemMatch/ - """ - - def __init__( - self, - field, - expression: Optional[dict] = None, - **kwargs: Any, - ): - self.field = field - - if expression is None: - self.expression = kwargs - else: - self.expression = expression - - @property - def query(self): - return {self.field: {"$elemMatch": self.expression}} - - -class Size(BaseFindArrayOperator): - """ - `$size` array query operator - - Example: - - ```python - class Sample(Document): - results: List[int] - - Size(Sample.results, 2) - ``` - - Will return query object like - - ```python - {"results": {"$size": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/size/ - """ - - def __init__( - self, - field, - num: int, - ): - self.field = field - self.num = num - - @property - def query(self): - return {self.field: {"$size": self.num}} diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/bitwise.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/bitwise.py deleted file mode 100644 index 2457f150..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/bitwise.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Union - -from beanie.odm.fields import ExpressionField -from beanie.odm.operators.find import BaseFindOperator - - -class BaseFindBitwiseOperator(BaseFindOperator): - operator = "" - - def __init__(self, field: Union[str, ExpressionField], bitmask): - self.field = field - self.bitmask = bitmask - - @property - def query(self): - return {self.field: {self.operator: self.bitmask}} - - -class BitsAllClear(BaseFindBitwiseOperator): - """ - `$bitsAllClear` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/bitsAllClear/ - """ - - operator = "$bitsAllClear" - - -class BitsAllSet(BaseFindBitwiseOperator): - """ - `$bitsAllSet` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/bitsAllSet/ - """ - - operator = "$bitsAllSet" - - -class BitsAnyClear(BaseFindBitwiseOperator): - """ - `$bitsAnyClear` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/bitsAnyClear/ - """ - - operator = "$bitsAnyClear" - - -class BitsAnySet(BaseFindBitwiseOperator): - """ - `$bitsAnySet` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/bitsAnySet/ - """ - - operator = "$bitsAnySet" diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/comparison.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/comparison.py deleted file mode 100644 index ef7d6c1f..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/comparison.py +++ /dev/null @@ -1,227 +0,0 @@ -from beanie.odm.operators.find import BaseFindOperator - - -class BaseFindComparisonOperator(BaseFindOperator): - operator = "" - - def __init__( - self, - field, - other, - ) -> None: - self.field = field - self.other = other - - @property - def query(self): - return {self.field: {self.operator: self.other}} - - -class Eq(BaseFindComparisonOperator): - """ - `equal` query operator - - Example: - - ```python - class Product(Document): - price: float - - Eq(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": 2} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/eq/ - """ - - @property - def query(self): - return {self.field: self.other} - - -class GT(BaseFindComparisonOperator): - """ - `$gt` query operator - - Example: - - ```python - class Product(Document): - price: float - - GT(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": {"$gt": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/gt/ - """ - - operator = "$gt" - - -class GTE(BaseFindComparisonOperator): - """ - `$gte` query operator - - Example: - - ```python - class Product(Document): - price: float - - GTE(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": {"$gte": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/gte/ - """ - - operator = "$gte" - - -class In(BaseFindComparisonOperator): - """ - `$in` query operator - - Example: - - ```python - class Product(Document): - price: float - - In(Product.price, [2, 3, 4]) - ``` - - Will return query object like - - ```python - {"price": {"$in": [2, 3, 4]}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/in/ - """ - - operator = "$in" - - -class NotIn(BaseFindComparisonOperator): - """ - `$nin` query operator - - Example: - - ```python - class Product(Document): - price: float - - NotIn(Product.price, [2, 3, 4]) - ``` - - Will return query object like - - ```python - {"price": {"$nin": [2, 3, 4]}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/nin/ - """ - - operator = "$nin" - - -class LT(BaseFindComparisonOperator): - """ - `$lt` query operator - - Example: - - ```python - class Product(Document): - price: float - - LT(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": {"$lt": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/lt/ - """ - - operator = "$lt" - - -class LTE(BaseFindComparisonOperator): - """ - `$lte` query operator - - Example: - - ```python - class Product(Document): - price: float - - LTE(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": {"$lte": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/lte/ - """ - - operator = "$lte" - - -class NE(BaseFindComparisonOperator): - """ - `$ne` query operator - - Example: - - ```python - class Product(Document): - price: float - - NE(Product.price, 2) - ``` - - Will return query object like - - ```python - {"price": {"$ne": 2}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/ne/ - """ - - operator = "$ne"
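The comparison operators above all reduce to plain MongoDB filter documents via their `query` property, so their output can be checked without a running database. A short sketch, assuming the beanie package deleted here is importable:

```python
from beanie.odm.operators.find.comparison import GT, In, NE, Eq

# Each operator is a Mapping over its `query` document; string field names work
# just as well as ExpressionField attributes.
assert dict(Eq("price", 2)) == {"price": 2}
assert dict(GT("price", 2)) == {"price": {"$gt": 2}}
assert dict(In("price", [2, 3, 4])) == {"price": {"$in": [2, 3, 4]}}
assert dict(NE("price", 2)) == {"price": {"$ne": 2}}
```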
 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/element.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/element.py deleted file mode 100644 index 0fa0ab5e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/element.py +++ /dev/null @@ -1,75 +0,0 @@ -from abc import ABC -from typing import List, Union - -from beanie.odm.operators.find import BaseFindOperator - - -class BaseFindElementOperator(BaseFindOperator, ABC): ... - - -class Exists(BaseFindElementOperator): - """ - `$exists` query operator - - Example: - - ```python - class Product(Document): - price: float - - Exists(Product.price, True) - ``` - - Will return query object like - - ```python - {"price": {"$exists": True}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/exists/ - """ - - def __init__( - self, - field, - value: bool = True, - ): - self.field = field - self.value = value - - @property - def query(self): - return {self.field: {"$exists": self.value}} - - -class Type(BaseFindElementOperator): - """ - `$type` query operator - - Example: - - ```python - class Product(Document): - price: float - - Type(Product.price, "decimal") - ``` - - Will return query object like - - ```python - {"price": {"$type": "decimal"}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/type/ - """ - - def __init__(self, field, types: Union[List[str], str]): - self.field = field - self.types = types - - @property - def query(self): - return {self.field: {"$type": self.types}} diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/evaluation.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/evaluation.py deleted file mode 100644 index 5a646f11..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/evaluation.py +++ /dev/null @@ -1,199 +0,0 @@ -from abc import ABC -from typing import Optional - -from beanie.odm.operators.find import BaseFindOperator - - -class BaseFindEvaluationOperator(BaseFindOperator, ABC): ... - - -class Expr(BaseFindEvaluationOperator): - """ - `$expr` query operator - - Example: - - ```python - class Sample(Document): - one: int - two: int - - Expr({"$gt": [ "$one" , "$two" ]}) - ``` - - Will return query object like - - ```python - {"$expr": {"$gt": [ "$one" , "$two" ]}} - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/expr/ - """ - - def __init__(self, expression: dict): - self.expression = expression - - @property - def query(self): - return {"$expr": self.expression} - - -class JsonSchema(BaseFindEvaluationOperator): - """ - `$jsonSchema` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/ - """ - - def __init__(self, expression: dict): - self.expression = expression - - @property - def query(self): - return {"$jsonSchema": self.expression} - - -class Mod(BaseFindEvaluationOperator): - """ - `$mod` query operator - - Example: - - ```python - class Sample(Document): - one: int - - Mod(Sample.one, 4, 0) - ``` - - Will return query object like - - ```python - { "one": { "$mod": [ 4, 0 ] } } - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/mod/ - """ - - def __init__(self, field, divisor: int, remainder: int): - self.field = field - self.divisor = divisor - self.remainder = remainder - - @property - def query(self): - return {self.field: {"$mod": [self.divisor, self.remainder]}} - - -class RegEx(BaseFindEvaluationOperator): - """ - `$regex` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/regex/ - """ - - def __init__( - self, - field, - pattern: str, - options: Optional[str] = None, - ): - self.field = field - self.pattern = pattern - self.options = options - - @property - def query(self): - expression = {"$regex": self.pattern} - if self.options: - expression["$options"] = self.options - return {self.field: expression} - - -class Text(BaseFindEvaluationOperator): - """ - `$text` query operator - - Example: - - ```python - class Sample(Document): - description: Indexed(str, pymongo.TEXT) - - Text("coffee") - ``` - - Will return query object like - - ```python - { - "$text": { - "$search": "coffee", - "$caseSensitive": False, - "$diacriticSensitive": False - } - } - ``` - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/text/ - - Note: if you need to run a query against Azure Cosmos DB for MongoDB, - which does not support diacritic sensitivity yet, you can set - `diacritic_sensitive` argument to `None` to exclude it from the query. - """ - - def __init__( - self, - search: str, - language: Optional[str] = None, - case_sensitive: bool = False, - diacritic_sensitive: Optional[bool] = False, - ): - """ - - :param search: str - :param language: Optional[str] = None - :param case_sensitive: bool = False - :param diacritic_sensitive: Optional[bool] = False - """ - self.search = search - self.language = language - self.case_sensitive = case_sensitive - self.diacritic_sensitive = diacritic_sensitive - - @property - def query(self): - expression = { - "$text": { - "$search": self.search, - "$caseSensitive": self.case_sensitive, - } - } - if self.language: - expression["$text"]["$language"] = self.language - if self.diacritic_sensitive is not None: - expression["$text"]["$diacriticSensitive"] = ( - self.diacritic_sensitive - ) - return expression - - -class Where(BaseFindEvaluationOperator): - """ - `$where` query operator - - MongoDB doc: - https://docs.mongodb.com/manual/reference/operator/query/where/ - """ - - def __init__(self, expression: str): - self.expression = expression - - @property - def query(self): - return {"$where": self.expression}
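The evaluation operators behave the same way; `Text` is the one oddity, building a collection-level `$text` document rather than a per-field one. A quick sketch under the same importability assumption:

```python
from beanie.odm.operators.find.evaluation import RegEx, Text

assert dict(RegEx("description", "^coffee", options="i")) == {
    "description": {"$regex": "^coffee", "$options": "i"}
}
# $caseSensitive / $diacriticSensitive are always emitted unless
# diacritic_sensitive is explicitly set to None (the Azure Cosmos DB case).
assert dict(Text("coffee", case_sensitive=True)) == {
    "$text": {
        "$search": "coffee",
        "$caseSensitive": True,
        "$diacriticSensitive": False,
    }
}
```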
= geo_type - self.coordinates = coordinates - - @property - def query(self): - return { - self.field: { - "$geoWithin": { - "$geometry": { - "type": self.geo_type, - "coordinates": self.coordinates, - } - } - } - } - - -class Box(BaseFindGeospatialOperator): - """ - `$box` query operator - - Example: - - ```python - class GeoObject(BaseModel): - type: str = "Point" - coordinates: Tuple[float, float] - - class Place(Document): - geo: GeoObject - - class Collection: - name = "places" - indexes = [ - [("geo", pymongo.GEOSPHERE)], # GEO index - ] - - Box(Place.geo, lower_left=[10,12], upper_right=[15,20]) - ``` - - Will return query object like - - ```python - { - "geo": { - "$geoWithin": { - "$box": [[10, 12], [15, 20]] - } - } - } - ``` - - MongoDB doc: - - """ - - def __init__( - self, field, lower_left: List[float], upper_right: List[float] - ): - self.field = field - self.coordinates = [lower_left, upper_right] - - @property - def query(self): - return {self.field: {"$geoWithin": {"$box": self.coordinates}}} - - -class Near(BaseFindGeospatialOperator): - """ - `$near` query operator - - Example: - - ```python - class GeoObject(BaseModel): - type: str = "Point" - coordinates: Tuple[float, float] - - class Place(Document): - geo: GeoObject - - class Collection: - name = "places" - indexes = [ - [("geo", pymongo.GEOSPHERE)], # GEO index - ] - - Near(Place.geo, 1.2345, 2.3456, min_distance=500) - ``` - - Will return query object like - - ```python - { - "geo": { - "$near": { - "$geometry": { - "type": "Point", - "coordinates": [1.2345, 2.3456], - }, - "$maxDistance": 500, - } - } - } - ``` - - MongoDB doc: - - """ - - operator = "$near" - - def __init__( - self, - field, - longitude: float, - latitude: float, - max_distance: Optional[float] = None, - min_distance: Optional[float] = None, - ): - self.field = field - self.longitude = longitude - self.latitude = latitude - self.max_distance = max_distance - self.min_distance = min_distance - - @property - def query(self): - expression = { - self.field: { - self.operator: { - "$geometry": { - "type": "Point", - "coordinates": [self.longitude, self.latitude], - }, - } - } - } - if self.max_distance: - expression[self.field][self.operator]["$maxDistance"] = ( - self.max_distance - ) # type: ignore - if self.min_distance: - expression[self.field][self.operator]["$minDistance"] = ( - self.min_distance - ) # type: ignore - return expression - - -class NearSphere(Near): - """ - `$nearSphere` query operator - - Example: - - ```python - class GeoObject(BaseModel): - type: str = "Point" - coordinates: Tuple[float, float] - - class Place(Document): - geo: GeoObject - - class Collection: - name = "places" - indexes = [ - [("geo", pymongo.GEOSPHERE)], # GEO index - ] - - NearSphere(Place.geo, 1.2345, 2.3456, min_distance=500) - ``` - - Will return query object like - - ```python - { - "geo": { - "$nearSphere": { - "$geometry": { - "type": "Point", - "coordinates": [1.2345, 2.3456], - }, - "$maxDistance": 500, - } - } - } - ``` - - MongoDB doc: - - """ - - operator = "$nearSphere" diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/logical.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/find/logical.py deleted file mode 100644 index 96adcd21..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/find/logical.py +++ /dev/null @@ -1,166 +0,0 @@ -from abc import ABC -from typing import Any, Dict, Mapping, Union - -from beanie.odm.operators.find import BaseFindOperator - - -class 
BaseFindLogicalOperator(BaseFindOperator, ABC): ... - - -class LogicalOperatorForListOfExpressions(BaseFindLogicalOperator): - # todo: handle query return typing - operator: str = "" - - def __init__( - self, - *expressions: Union[ - BaseFindOperator, Dict[str, Any], Mapping[str, Any], bool - ], - ): - self.expressions = list(expressions) - - @property - def query(self): - if not self.expressions: - raise AttributeError("At least one expression must be provided") - if len(self.expressions) == 1: - return self.expressions[0] - return {self.operator: self.expressions} - - -class Or(LogicalOperatorForListOfExpressions): - """ - `$or` query operator - - Example: - - ```python - class Product(Document): - price: float - category: str - - Or(Product.price<10, Product.category=="Sweets") - ``` - - Will return query object like - - ```python - {"$or": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} - ``` - - MongoDB doc: - - """ - - operator = "$or" - - -class And(LogicalOperatorForListOfExpressions): - """ - `$and` query operator - - Example: - - ```python - class Product(Document): - price: float - category: str - - And(Product.price<10, Product.category=="Sweets") - ``` - - Will return query object like - - ```python - {"$and": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} - ``` - - MongoDB doc: - - """ - - operator = "$and" - - -class Nor(BaseFindLogicalOperator): - """ - `$nor` query operator - - Example: - - ```python - class Product(Document): - price: float - category: str - - Nor(Product.price<10, Product.category=="Sweets") - ``` - - Will return query object like - - ```python - {"$nor": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} - ``` - - MongoDB doc: - - """ - - def __init__( - self, - *expressions: Union[ - BaseFindOperator, Dict[str, Any], Mapping[str, Any], bool - ], - ): - self.expressions = list(expressions) - - @property - def query(self): - return {"$nor": self.expressions} - - -class Not(BaseFindLogicalOperator): - """ - `$not` query operator - - Example: - - ```python - class Product(Document): - price: float - category: str - - Not(Product.price<10) - ``` - - Will return query object like - - ```python - {"$not": {"price": {"$lt": 10}}} - ``` - - MongoDB doc: - - """ - - def __init__(self, expression: Mapping[str, Any]): - self.expression = expression - - @property - def query(self): - if len(self.expression) == 1: - expression_key = list(self.expression.keys())[0] - if expression_key.startswith("$"): - raise AttributeError( - "Not operator can not be used with operators" - ) - value = self.expression[expression_key] - if isinstance(value, dict): - internal_key = list(value.keys())[0] - if internal_key.startswith("$"): - return {expression_key: {"$not": value}} - - return {expression_key: {"$not": {"$eq": value}}} - raise AttributeError( - "Not operator can only be used with one expression" - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__init__.py deleted file mode 100644 index 7fa8dff2..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from abc import abstractmethod -from typing import Any, Mapping - -from beanie.odm.operators import BaseOperator - - -class BaseUpdateOperator(BaseOperator): - @property - @abstractmethod - def query(self) -> Mapping[str, Any]: ... 
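The logical operators in the `logical.py` module removed above compose into plain MongoDB filter documents. For code that depended on this vendored copy, here is a minimal sketch of their documented behaviour, assuming `beanie` is installed from PyPI and reusing the `Product` model from the docstrings above; the rendered dictionaries mirror the "Will return query object like" examples in the deleted source:

```python
from beanie import Document
from beanie.odm.operators.find.logical import And, Not, Or


class Product(Document):
    price: float
    category: str


# Two or more expressions are wrapped under the operator key; a single
# expression is returned unwrapped (see LogicalOperatorForListOfExpressions).
And(Product.price < 10, Product.category == "Sweets").query
# renders to {"$and": [{"price": {"$lt": 10}}, {"category": "Sweets"}]}

Or(Product.price < 10).query
# renders to {"price": {"$lt": 10}}

# Not wraps a bare value in an implicit $eq before negating it, and raises
# if the top-level key is itself an operator.
Not({"category": "Sweets"}).query
# renders to {"category": {"$not": {"$eq": "Sweets"}}}
```

In practice these are passed to a find query rather than rendered by hand, e.g. `Product.find(Or(Product.price < 10, Product.category == "Sweets"))`.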
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/__init__.cpython-312.pyc
deleted file mode 100644
index 2005d233..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/array.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/array.cpython-312.pyc
deleted file mode 100644
index 119b5d53..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/array.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/bitwise.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/bitwise.cpython-312.pyc
deleted file mode 100644
index eda8c8bd..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/bitwise.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/general.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/general.cpython-312.pyc
deleted file mode 100644
index 90f12ea8..00000000
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/__pycache__/general.cpython-312.pyc and /dev/null differ
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/array.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/array.py
deleted file mode 100644
index 1347333e..00000000
--- a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/array.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from beanie.odm.operators.update import BaseUpdateOperator
-
-
-class BaseUpdateArrayOperator(BaseUpdateOperator):
-    operator = ""
-
-    def __init__(self, expression):
-        self.expression = expression
-
-    @property
-    def query(self):
-        return {self.operator: self.expression}
-
-
-class AddToSet(BaseUpdateArrayOperator):
-    """
-    `$addToSet` update array query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        results: List[int]
-
-    AddToSet({Sample.results: 2})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$addToSet": {"results": 2}}
-    ```
-
-    MongoDB docs:
-
-    """
-
-    operator = "$addToSet"
-
-
-class Pop(BaseUpdateArrayOperator):
-    """
-    `$pop` update array query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        results: List[int]
-
-    Pop({Sample.results: -1})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$pop": {"results": -1}}
-    ```
-
-    MongoDB docs:
-
-    """
-
-    operator = "$pop"
-
-
-class Pull(BaseUpdateArrayOperator):
-    """
-    `$pull` update array query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        results: List[int]
-
-    Pull(In(Sample.results, [1, 2, 3, 4, 5]))
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$pull": {"results": {"$in": [1, 2, 3, 4, 5]}}}
-    ```
-
-    MongoDB docs:
-
-    """
-
-    operator = "$pull"
-
-
-class Push(BaseUpdateArrayOperator):
-    """
-    `$push` update array query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        results: List[int]
-
-    Push({Sample.results: 1})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$push": {"results": 1}}
-    ```
-
-    MongoDB docs:
-
-    """
-
-    operator = "$push"
-
-
-class PullAll(BaseUpdateArrayOperator):
-    """
-    `$pullAll` update array query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        results: List[int]
-
-    PullAll({Sample.results: [0, 5]})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$pullAll": {"results": [0, 5]}}
-    ```
-
-    MongoDB docs:
-
-    """
-
-    operator = "$pullAll"
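The array update operators deleted above all follow the same pattern: each renders to a single-key update document. A minimal sketch of their use, assuming `beanie` is installed from PyPI; the `Sample` model comes from the docstrings above, and the import path for `In` (`beanie.odm.operators.find.comparison`) is an assumption based on the package layout shown in this diff:

```python
from typing import List

from beanie import Document
from beanie.odm.operators.find.comparison import In  # assumed module path
from beanie.odm.operators.update.array import AddToSet, Pull, Push


class Sample(Document):
    results: List[int]


AddToSet({Sample.results: 2}).query  # {"$addToSet": {"results": 2}}
Push({Sample.results: 1}).query      # {"$push": {"results": 1}}

# Pull accepts a nested find operator as its criterion; after encoding it
# becomes {"$pull": {"results": {"$in": [1, 2, 3, 4, 5]}}}.
Pull(In(Sample.results, [1, 2, 3, 4, 5])).query
```

In application code these are normally handed to an update query, e.g. `await Sample.find_all().update(Push({Sample.results: 1}))`, rather than encoded by hand.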
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/bitwise.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/bitwise.py
deleted file mode 100644
index c69351a8..00000000
--- a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/bitwise.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from abc import ABC
-
-from beanie.odm.operators.update import BaseUpdateOperator
-
-
-class BaseUpdateBitwiseOperator(BaseUpdateOperator, ABC): ...
-
-
-class Bit(BaseUpdateBitwiseOperator):
-    """
-    `$bit` update query operator
-
-    MongoDB doc:
-
-    """
-
-    def __init__(self, expression: dict):
-        self.expression = expression
-
-    @property
-    def query(self):
-        return {"$bit": self.expression}
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/general.py b/venv/lib/python3.12/site-packages/beanie/odm/operators/update/general.py
deleted file mode 100644
index e4994396..00000000
--- a/venv/lib/python3.12/site-packages/beanie/odm/operators/update/general.py
+++ /dev/null
@@ -1,234 +0,0 @@
-from beanie.odm.operators.update import BaseUpdateOperator
-
-
-class BaseUpdateGeneralOperator(BaseUpdateOperator):
-    operator = ""
-
-    def __init__(self, expression):
-        self.expression = expression
-
-    @property
-    def query(self):
-        return {self.operator: self.expression}
-
-
-class Set(BaseUpdateGeneralOperator):
-    """
-    `$set` update query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        one: int
-
-    Set({Sample.one: 2})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$set": {"one": 2}}
-    ```
-
-    MongoDB doc:
-
-    """
-
-    operator = "$set"
-
-
-class SetRevisionId:
-    """
-    Internal `$set` helper that updates the document's `revision_id` field.
-
-    ```python
-    SetRevisionId(revision_id)
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$set": {"revision_id": revision_id}}
-    ```
-
-    MongoDB doc:
-
-    """
-
-    def __init__(self, revision_id):
-        self.revision_id = revision_id
-        self.operator = "$set"
-        self.expression = {"revision_id": self.revision_id}
-
-    @property
-    def query(self):
-        return {self.operator: self.expression}
-
-
-class CurrentDate(BaseUpdateGeneralOperator):
-    """
-    `$currentDate` update query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        ts: datetime
-
-    CurrentDate({Sample.ts: True})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$currentDate": {"ts": True}}
-    ```
-
-    MongoDB doc:
-
-    """
-
-    operator = "$currentDate"
-
-
-class Inc(BaseUpdateGeneralOperator):
-    """
-    `$inc` update query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        one: int
-
-    Inc({Sample.one: 2})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$inc": {"one": 2}}
-    ```
-
-    MongoDB doc:
-
-    """
-
-    operator = "$inc"
-
-
-class Min(BaseUpdateGeneralOperator):
-    """
-    `$min` update query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        one: int
-
-    Min({Sample.one: 2})
-    ```
-
-    Will return query object like
-
-    ```python
-    {"$min": {"one": 2}}
-    ```
-
-    MongoDB doc:
-
-    """
-
-    operator = "$min"
-
-
-class Max(BaseUpdateGeneralOperator):
-    """
-    `$max` update query operator
-
-    Example:
-
-    ```python
-    class Sample(Document):
-        one: int
-
-    
Max({Sample.one: 2}) - ``` - - Will return query object like - - ```python - {"$max": {"one": 2}} - ``` - - MongoDB doc: - - """ - - operator = "$max" - - -class Mul(BaseUpdateGeneralOperator): - """ - `$mul` update query operator - - Example: - - ```python - class Sample(Document): - one: int - - Mul({Sample.one: 2}) - ``` - - Will return query object like - - ```python - {"$mul": {"one": 2}} - ``` - - MongoDB doc: - - """ - - operator = "$mul" - - -class Rename(BaseUpdateGeneralOperator): - """ - `$rename` update query operator - - MongoDB doc: - - """ - - operator = "$rename" - - -class SetOnInsert(BaseUpdateGeneralOperator): - """ - `$setOnInsert` update query operator - - MongoDB doc: - - """ - - operator = "$setOnInsert" - - -class Unset(BaseUpdateGeneralOperator): - """ - `$unset` update query operator - - MongoDB doc: - - """ - - operator = "$unset" diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 2e377eb0..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/aggregation.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/aggregation.cpython-312.pyc deleted file mode 100644 index 9ca6e9a4..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/aggregation.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/cursor.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/cursor.cpython-312.pyc deleted file mode 100644 index a629b4cf..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/cursor.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/delete.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/delete.cpython-312.pyc deleted file mode 100644 index 1ae730eb..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/delete.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/find.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/find.cpython-312.pyc deleted file mode 100644 index 9d092eb6..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/find.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/update.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/update.cpython-312.pyc deleted file mode 100644 index 43dc273c..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/queries/__pycache__/update.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/aggregation.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/aggregation.py deleted file mode 100644 index 9dba0b22..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/queries/aggregation.py +++ 
/dev/null @@ -1,105 +0,0 @@ -from typing import ( - TYPE_CHECKING, - Any, - Generic, - List, - Mapping, - Optional, - Type, - TypeVar, -) - -from pydantic import BaseModel -from pymongo.asynchronous.command_cursor import AsyncCommandCursor - -from beanie.odm.cache import LRUCache -from beanie.odm.interfaces.clone import CloneInterface -from beanie.odm.interfaces.session import SessionMethods -from beanie.odm.queries.cursor import BaseCursorQuery -from beanie.odm.utils.projection import get_projection - -if TYPE_CHECKING: - from beanie.odm.documents import DocType - -AggregationProjectionType = TypeVar("AggregationProjectionType") - - -class AggregationQuery( - Generic[AggregationProjectionType], - BaseCursorQuery[AggregationProjectionType], - SessionMethods, - CloneInterface, -): - """ - Aggregation Query - """ - - def __init__( - self, - document_model: Type["DocType"], - aggregation_pipeline: List[Mapping[str, Any]], - find_query: Mapping[str, Any], - projection_model: Optional[Type[BaseModel]] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ): - self.aggregation_pipeline: List[Mapping[str, Any]] = ( - aggregation_pipeline - ) - self.document_model = document_model - self.projection_model = projection_model - self.find_query = find_query - self.session = None - self.ignore_cache = ignore_cache - self.pymongo_kwargs = pymongo_kwargs - - @property - def _cache_key(self) -> str: - return LRUCache.create_key( - { - "type": "Aggregation", - "filter": self.find_query, - "pipeline": self.aggregation_pipeline, - "projection": get_projection(self.projection_model) - if self.projection_model - else None, - } - ) - - def _get_cache(self): - if ( - self.document_model.get_settings().use_cache - and self.ignore_cache is False - ): - return self.document_model._cache.get(self._cache_key) # type: ignore - else: - return None - - def _set_cache(self, data): - if ( - self.document_model.get_settings().use_cache - and self.ignore_cache is False - ): - return self.document_model._cache.set(self._cache_key, data) # type: ignore - - def get_aggregation_pipeline( - self, - ) -> List[Mapping[str, Any]]: - match_pipeline: List[Mapping[str, Any]] = ( - [{"$match": self.find_query}] if self.find_query else [] - ) - projection_pipeline: List[Mapping[str, Any]] = [] - if self.projection_model: - projection = get_projection(self.projection_model) - if projection is not None: - projection_pipeline = [{"$project": projection}] - return match_pipeline + self.aggregation_pipeline + projection_pipeline - - async def get_cursor(self) -> AsyncCommandCursor: - aggregation_pipeline = self.get_aggregation_pipeline() - return await self.document_model.get_pymongo_collection().aggregate( - aggregation_pipeline, session=self.session, **self.pymongo_kwargs - ) - - def get_projection_model(self) -> Optional[Type[BaseModel]]: - return self.projection_model diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/cursor.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/cursor.py deleted file mode 100644 index fd98a97e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/queries/cursor.py +++ /dev/null @@ -1,79 +0,0 @@ -from abc import abstractmethod -from typing import ( - Any, - Dict, - Generic, - List, - Optional, - Type, - TypeVar, - cast, -) - -from pydantic.main import BaseModel - -from beanie.odm.utils.parsing import parse_obj - -CursorResultType = TypeVar("CursorResultType") - - -class BaseCursorQuery(Generic[CursorResultType]): - """ - BaseCursorQuery class. 
Wrapper over AsyncCursor, - which parse result with model - """ - - cursor = None - lazy_parse = False - - @abstractmethod - def get_projection_model(self) -> Optional[Type[BaseModel]]: ... - - @abstractmethod - async def get_cursor(self): ... - - def _cursor_params(self): ... - - def __aiter__(self): - return self - - async def __anext__(self) -> CursorResultType: - if self.cursor is None: - self.cursor = await self.get_cursor() - next_item = await self.cursor.__anext__() - projection = self.get_projection_model() - if projection is None: - return next_item - return parse_obj(projection, next_item, lazy_parse=self.lazy_parse) # type: ignore - - @abstractmethod - def _get_cache(self) -> List[Dict[str, Any]]: ... - - @abstractmethod - def _set_cache(self, data): ... - - async def to_list( - self, length: Optional[int] = None - ) -> List[CursorResultType]: # noqa - """ - Get list of documents - - :param length: Optional[int] - length of the list - :return: Union[List[BaseModel], List[Dict[str, Any]]] - """ - cursor = await self.get_cursor() - pymongo_list: List[Dict[str, Any]] = self._get_cache() - - if pymongo_list is None: - pymongo_list = await cursor.to_list(length) - self._set_cache(pymongo_list) - projection = self.get_projection_model() - if projection is not None: - return cast( - List[CursorResultType], - [ - parse_obj(projection, i, lazy_parse=self.lazy_parse) - for i in pymongo_list - ], - ) - return cast(List[CursorResultType], pymongo_list) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/delete.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/delete.py deleted file mode 100644 index f574318e..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/queries/delete.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import TYPE_CHECKING, Any, Dict, Generator, Mapping, Optional, Type - -from pymongo import DeleteMany as DeleteManyPyMongo -from pymongo import DeleteOne as DeleteOnePyMongo -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.results import DeleteResult - -from beanie.odm.bulk import BulkWriter -from beanie.odm.interfaces.clone import CloneInterface -from beanie.odm.interfaces.session import SessionMethods - -if TYPE_CHECKING: - from beanie.odm.documents import DocType - - -class DeleteQuery(SessionMethods, CloneInterface): - """ - Deletion Query - """ - - def __init__( - self, - document_model: Type["DocType"], - find_query: Mapping[str, Any], - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ): - self.document_model = document_model - self.find_query = find_query - self.session: Optional[AsyncClientSession] = None - self.bulk_writer = bulk_writer - self.pymongo_kwargs: Dict[str, Any] = pymongo_kwargs - - -class DeleteMany(DeleteQuery): - def __await__( - self, - ) -> Generator[DeleteResult, None, Optional[DeleteResult]]: - """ - Run the query - :return: - """ - if self.bulk_writer is None: - return ( - yield from self.document_model.get_pymongo_collection() - .delete_many( - self.find_query, - session=self.session, - **self.pymongo_kwargs, - ) - .__await__() - ) - else: - self.bulk_writer.add_operation( - self.document_model, - DeleteManyPyMongo(self.find_query, **self.pymongo_kwargs), - ) - return None - - -class DeleteOne(DeleteQuery): - def __await__( - self, - ) -> Generator[DeleteResult, None, Optional[DeleteResult]]: - """ - Run the query - :return: - """ - if self.bulk_writer is None: - return ( - yield from self.document_model.get_pymongo_collection() - .delete_one( - self.find_query, 
- session=self.session, - **self.pymongo_kwargs, - ) - .__await__() - ) - else: - self.bulk_writer.add_operation( - self.document_model, - DeleteOnePyMongo(self.find_query), - **self.pymongo_kwargs, - ) - return None diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/find.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/find.py deleted file mode 100644 index 59b74f9d..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/queries/find.py +++ /dev/null @@ -1,1051 +0,0 @@ -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Coroutine, - Dict, - Generator, - Generic, - List, - Mapping, - Optional, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) - -from pydantic import BaseModel -from pymongo import ReplaceOne -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.results import UpdateResult - -from beanie.exceptions import DocumentNotFound -from beanie.odm.bulk import BulkWriter -from beanie.odm.cache import LRUCache -from beanie.odm.enums import SortDirection -from beanie.odm.interfaces.aggregation_methods import AggregateMethods -from beanie.odm.interfaces.clone import CloneInterface -from beanie.odm.interfaces.session import SessionMethods -from beanie.odm.interfaces.update import UpdateMethods -from beanie.odm.operators.find.logical import And -from beanie.odm.queries.aggregation import AggregationQuery -from beanie.odm.queries.cursor import BaseCursorQuery -from beanie.odm.queries.delete import ( - DeleteMany, - DeleteOne, -) -from beanie.odm.queries.update import ( - UpdateMany, - UpdateOne, - UpdateQuery, - UpdateResponse, -) -from beanie.odm.utils.dump import get_dict -from beanie.odm.utils.encoder import Encoder -from beanie.odm.utils.find import construct_lookup_queries, split_text_query -from beanie.odm.utils.parsing import parse_obj -from beanie.odm.utils.projection import get_projection -from beanie.odm.utils.relations import convert_ids - -if TYPE_CHECKING: - from beanie.odm.documents import DocType - -FindQueryProjectionType = TypeVar("FindQueryProjectionType", bound=BaseModel) -FindQueryResultType = TypeVar("FindQueryResultType", bound=BaseModel) - - -class FindQuery( - Generic[FindQueryResultType], UpdateMethods, SessionMethods, CloneInterface -): - """ - Find Query base class - """ - - UpdateQueryType: Union[ - Type[UpdateQuery], Type[UpdateMany], Type[UpdateOne] - ] = UpdateQuery - DeleteQueryType: Union[Type[DeleteOne], Type[DeleteMany]] = DeleteMany - AggregationQueryType = AggregationQuery - - def __init__(self, document_model: Type["DocType"]): - self.document_model = document_model - self.find_expressions: List[Mapping[str, Any]] = [] - self.projection_model: Type[FindQueryResultType] = cast( - Type[FindQueryResultType], self.document_model - ) - self.session = None - self.encoders: Dict[Any, Callable[[Any], Any]] = {} - self.ignore_cache: bool = False - self.encoders = self.document_model.get_bson_encoders() - self.fetch_links: bool = False - self.pymongo_kwargs: Dict[str, Any] = {} - self.lazy_parse = False - self.nesting_depth: Optional[int] = None - self.nesting_depths_per_field: Optional[Dict[str, int]] = None - - def prepare_find_expressions(self): - if self.document_model.get_link_fields() is not None: - for i, query in enumerate(self.find_expressions): - self.find_expressions[i] = convert_ids( - query, - doc=self.document_model, # type: ignore - fetch_links=self.fetch_links, - ) - - def get_filter_query(self) -> Mapping[str, Any]: - """ - - Returns: MongoDB filter query - - """ - 
self.prepare_find_expressions() - if self.find_expressions: - return Encoder(custom_encoders=self.encoders).encode( - And(*self.find_expressions).query - ) - else: - return {} - - def delete( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> Union[DeleteOne, DeleteMany]: - """ - Provide search criteria to the Delete query - - :param session: Optional[AsyncClientSession] - pymongo session - :return: Union[DeleteOne, DeleteMany] - """ - self.set_session(session=session) - return self.DeleteQueryType( - document_model=self.document_model, - find_query=self.get_filter_query(), - bulk_writer=bulk_writer, - **pymongo_kwargs, - ).set_session(session=session) - - def project(self, projection_model): - """ - Apply projection parameter - :param projection_model: Optional[Type[BaseModel]] - projection model - :return: self - """ - if projection_model is not None: - self.projection_model = projection_model - return self - - def get_projection_model(self) -> Type[FindQueryResultType]: - return self.projection_model - - async def count(self) -> int: - """ - Number of found documents - :return: int - """ - kwargs = {} - if isinstance(self, FindMany): - if self.limit_number: - kwargs["limit"] = self.limit_number - if self.skip_number: - kwargs["skip"] = self.skip_number - return ( - await self.document_model.get_pymongo_collection().count_documents( - self.get_filter_query(), session=self.session, **kwargs - ) - ) - - async def exists(self) -> bool: - """ - If find query will return anything - - :return: bool - """ - return await self.count() > 0 - - -class FindMany( - FindQuery[FindQueryResultType], - BaseCursorQuery[FindQueryResultType], - AggregateMethods, -): - """ - Find Many query class - """ - - UpdateQueryType = UpdateMany - DeleteQueryType = DeleteMany - - def __init__(self, document_model: Type["DocType"]): - super(FindMany, self).__init__(document_model=document_model) - self.sort_expressions: List[Tuple[str, SortDirection]] = [] - self.skip_number: int = 0 - self.limit_number: int = 0 - - @overload - def find_many( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindMany[FindQueryResultType]": ... - - @overload - def find_many( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindMany[FindQueryProjectionType]": ... 
- - def find_many( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[ - "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" - ]: - """ - Find many documents by criteria - - :param args: *Mapping[Any, Any] - search criteria - :param skip: Optional[int] - The number of documents to omit. - :param limit: Optional[int] - The maximum number of results to return. - :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key - or a list of (key, direction) pairs specifying the sort order - for this query. - :param projection_model: Optional[Type[BaseModel]] - projection model - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) - :return: FindMany - query instance - """ - self.find_expressions += args # type: ignore # bool workaround - self.skip(skip) - self.limit(limit) - self.sort(sort) - self.project(projection_model) - self.set_session(session=session) - self.ignore_cache = ignore_cache - self.fetch_links = fetch_links - self.pymongo_kwargs.update(pymongo_kwargs) - self.nesting_depth = nesting_depth - self.nesting_depths_per_field = nesting_depths_per_field - if lazy_parse is True: - self.lazy_parse = lazy_parse - return self - - # TODO probably merge FindOne and FindMany to one class to avoid this - # code duplication - - @overload - def project( - self: "FindMany", - projection_model: None, - ) -> "FindMany[FindQueryResultType]": ... - - @overload - def project( - self: "FindMany", - projection_model: Type[FindQueryProjectionType], - ) -> "FindMany[FindQueryProjectionType]": ... - - def project( - self: "FindMany", - projection_model: Optional[Type[FindQueryProjectionType]], - ) -> Union[ - "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" - ]: - """ - Apply projection parameter - - :param projection_model: Optional[Type[BaseModel]] - projection model - :return: self - """ - super().project(projection_model) - return self - - @overload - def find( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindMany[FindQueryResultType]": ... 
- - @overload - def find( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindMany[FindQueryProjectionType]": ... - - def find( - self: "FindMany[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - skip: Optional[int] = None, - limit: Optional[int] = None, - sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - lazy_parse: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[ - "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" - ]: - """ - The same as `find_many(...)` - """ - return self.find_many( - *args, - skip=skip, - limit=limit, - sort=sort, - projection_model=projection_model, - session=session, - ignore_cache=ignore_cache, - fetch_links=fetch_links, - lazy_parse=lazy_parse, - nesting_depth=nesting_depth, - nesting_depths_per_field=nesting_depths_per_field, - **pymongo_kwargs, - ) - - def sort( - self, - *args: Optional[ - Union[ - str, Tuple[str, SortDirection], List[Tuple[str, SortDirection]] - ] - ], - ) -> "FindMany[FindQueryResultType]": - """ - Add sort parameters - :param args: Union[str, Tuple[str, SortDirection], - List[Tuple[str, SortDirection]]] - A key or a tuple (key, direction) - or a list of (key, direction) pairs specifying - the sort order for this query. - :return: self - """ - for arg in args: - if arg is None: - pass - elif isinstance(arg, list): - self.sort(*arg) - elif isinstance(arg, tuple): - self.sort_expressions.append(arg) - elif isinstance(arg, str): - if arg.startswith("+"): - self.sort_expressions.append( - (arg[1:], SortDirection.ASCENDING) - ) - elif arg.startswith("-"): - self.sort_expressions.append( - (arg[1:], SortDirection.DESCENDING) - ) - else: - self.sort_expressions.append( - (arg, SortDirection.ASCENDING) - ) - else: - raise TypeError("Wrong argument type") - return self - - def skip(self, n: Optional[int]) -> "FindMany[FindQueryResultType]": - """ - Set skip parameter - :param n: int - :return: self - """ - if n is not None: - self.skip_number = n - return self - - def limit(self, n: Optional[int]) -> "FindMany[FindQueryResultType]": - """ - Set limit parameter - :param n: int - :return: - """ - if n is not None: - self.limit_number = n - return self - - def update( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ): - """ - Create Update with modifications query - and provide search criteria there - - :param args: *Mapping[str,Any] - the modifications to apply. 
- :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - :return: UpdateMany query - """ - self.set_session(session) - return ( - self.UpdateQueryType( - document_model=self.document_model, - find_query=self.get_filter_query(), - ) - .update(*args, bulk_writer=bulk_writer, **pymongo_kwargs) - .set_session(session=self.session) - ) - - def upsert( - self, - *args: Mapping[str, Any], - on_insert: "DocType", - session: Optional[AsyncClientSession] = None, - **pymongo_kwargs: Any, - ): - """ - Create Update with modifications query - and provide search criteria there - - :param args: *Mapping[str,Any] - the modifications to apply. - :param on_insert: DocType - document to insert if there is no matched - document in the collection - :param session: Optional[AsyncClientSession] - pymongo session - :return: UpdateMany query - """ - self.set_session(session) - return ( - self.UpdateQueryType( - document_model=self.document_model, - find_query=self.get_filter_query(), - ) - .upsert( - *args, - on_insert=on_insert, - **pymongo_kwargs, - ) - .set_session(session=self.session) - ) - - def update_many( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> UpdateMany: - """ - Provide search criteria to the - [UpdateMany](query.md#updatemany) query - - :param args: *Mapping[str,Any] - the modifications to apply. - :param session: Optional[AsyncClientSession] - pymongo session - :return: [UpdateMany](query.md#updatemany) query - """ - return cast( - UpdateMany, - self.update( - *args, - session=session, - bulk_writer=bulk_writer, - **pymongo_kwargs, - ), - ) - - def delete_many( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> DeleteMany: - """ - Provide search criteria to the [DeleteMany](query.md#deletemany) query - - :param session: Optional[AsyncClientSession] - pymongo session - :return: [DeleteMany](query.md#deletemany) query - """ - # We need to cast here to tell mypy that we are sure about the type. - # This is because delete may also return a DeleteOne type in general, and mypy can not be sure in this case - # See https://mypy.readthedocs.io/en/stable/common_issues.html#narrowing-and-inner-functions - return cast( - DeleteMany, - self.delete( - session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ), - ) - - @overload - def aggregate( - self, - aggregation_pipeline: List[Any], - projection_model: None = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> AggregationQuery[Dict[str, Any]]: ... - - @overload - def aggregate( - self, - aggregation_pipeline: List[Any], - projection_model: Type[FindQueryProjectionType], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> AggregationQuery[FindQueryProjectionType]: ... - - def aggregate( - self, - aggregation_pipeline: List[Any], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - **pymongo_kwargs: Any, - ) -> Union[ - AggregationQuery[Dict[str, Any]], - AggregationQuery[FindQueryProjectionType], - ]: - """ - Provide search criteria to the [AggregationQuery](query.md#aggregationquery) - - :param aggregation_pipeline: list - aggregation pipeline. 
MongoDB doc: - - :param projection_model: Type[BaseModel] - Projection Model - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - :return:[AggregationQuery](query.md#aggregationquery) - """ - self.set_session(session=session) - return self.AggregationQueryType( - self.document_model, - self.build_aggregation_pipeline(*aggregation_pipeline), - find_query={}, - projection_model=projection_model, - ignore_cache=ignore_cache, - **pymongo_kwargs, - ).set_session(session=self.session) - - @property - def _cache_key(self) -> str: - return LRUCache.create_key( - { - "type": "FindMany", - "filter": self.get_filter_query(), - "sort": self.sort_expressions, - "projection": get_projection(self.projection_model), - "skip": self.skip_number, - "limit": self.limit_number, - } - ) - - def _get_cache(self): - if ( - self.document_model.get_settings().use_cache - and self.ignore_cache is False - ): - return self.document_model._cache.get(self._cache_key) # type: ignore - else: - return None - - def _set_cache(self, data): - if ( - self.document_model.get_settings().use_cache - and self.ignore_cache is False - ): - return self.document_model._cache.set(self._cache_key, data) # type: ignore - - def build_aggregation_pipeline(self, *extra_stages): - if self.fetch_links: - aggregation_pipeline: List[Dict[str, Any]] = ( - construct_lookup_queries( - self.document_model, - nesting_depth=self.nesting_depth, - nesting_depths_per_field=self.nesting_depths_per_field, - ) - ) - else: - aggregation_pipeline = [] - filter_query = self.get_filter_query() - - if filter_query: - text_queries, non_text_queries = split_text_query(filter_query) - - if text_queries: - aggregation_pipeline.insert( - 0, - { - "$match": ( - {"$and": text_queries} - if len(text_queries) > 1 - else text_queries[0] - ) - }, - ) - - if non_text_queries: - aggregation_pipeline.append( - { - "$match": ( - {"$and": non_text_queries} - if len(non_text_queries) > 1 - else non_text_queries[0] - ) - } - ) - - if extra_stages: - aggregation_pipeline.extend(extra_stages) - sort_pipeline = {"$sort": {i[0]: i[1] for i in self.sort_expressions}} - if sort_pipeline["$sort"]: - aggregation_pipeline.append(sort_pipeline) - if self.skip_number != 0: - aggregation_pipeline.append({"$skip": self.skip_number}) - if self.limit_number != 0: - aggregation_pipeline.append({"$limit": self.limit_number}) - return aggregation_pipeline - - async def get_cursor(self): - if self.fetch_links: - aggregation_pipeline: List[Dict[str, Any]] = ( - self.build_aggregation_pipeline() - ) - - projection = get_projection(self.projection_model) - - if projection is not None: - aggregation_pipeline.append({"$project": projection}) - - return ( - await self.document_model.get_pymongo_collection().aggregate( - aggregation_pipeline, - session=self.session, - **self.pymongo_kwargs, - ) - ) - - return self.document_model.get_pymongo_collection().find( - filter=self.get_filter_query(), - sort=self.sort_expressions, - projection=get_projection(self.projection_model), - skip=self.skip_number, - limit=self.limit_number, - session=self.session, - **self.pymongo_kwargs, - ) - - async def first_or_none(self) -> Optional[FindQueryResultType]: - """ - Returns the first found element or None if no elements were found - """ - existing_limit = self.limit_number - try: - result = await self.limit(1).to_list() - return result[0] if result else None - finally: - self.limit_number = existing_limit - - async def count(self) -> int: - """ - Number of found documents - 
:return: int - """ - if self.fetch_links: - aggregation_pipeline: List[Dict[str, Any]] = ( - self.build_aggregation_pipeline() - ) - - aggregation_pipeline.append({"$count": "count"}) - cursor = ( - await self.document_model.get_pymongo_collection().aggregate( - aggregation_pipeline, - session=self.session, - **self.pymongo_kwargs, - ) - ) - result = await cursor.to_list(length=1) - - return result[0]["count"] if result else 0 - - return await super(FindMany, self).count() - - -class FindOne(FindQuery[FindQueryResultType]): - """ - Find One query class - """ - - UpdateQueryType = UpdateOne - DeleteQueryType = DeleteOne - - @overload - def project( - self: "FindOne[FindQueryResultType]", - projection_model: None = None, - ) -> "FindOne[FindQueryResultType]": ... - - @overload - def project( - self: "FindOne[FindQueryResultType]", - projection_model: Type[FindQueryProjectionType], - ) -> "FindOne[FindQueryProjectionType]": ... - - # TODO probably merge FindOne and FindMany to one class to avoid this - # code duplication - - def project( - self: "FindOne[FindQueryResultType]", - projection_model: Optional[Type[FindQueryProjectionType]] = None, - ) -> Union[ - "FindOne[FindQueryResultType]", "FindOne[FindQueryProjectionType]" - ]: - """ - Apply projection parameter - :param projection_model: Optional[Type[BaseModel]] - projection model - :return: self - """ - super().project(projection_model) - return self - - @overload - def find_one( - self: "FindOne[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: None = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindOne[FindQueryResultType]": ... - - @overload - def find_one( - self: "FindOne[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Type[FindQueryProjectionType], - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> "FindOne[FindQueryProjectionType]": ... 
- - def find_one( - self: "FindOne[FindQueryResultType]", - *args: Union[Mapping[Any, Any], bool], - projection_model: Optional[Type[FindQueryProjectionType]] = None, - session: Optional[AsyncClientSession] = None, - ignore_cache: bool = False, - fetch_links: bool = False, - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, - **pymongo_kwargs: Any, - ) -> Union[ - "FindOne[FindQueryResultType]", "FindOne[FindQueryProjectionType]" - ]: - """ - Find one document by criteria - - :param args: *Mapping[Any, Any] - search criteria - :param projection_model: Optional[Type[BaseModel]] - projection model - :param session: Optional[AsyncClientSession] - pymongo session - :param ignore_cache: bool - :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) - :return: FindOne - query instance - """ - self.find_expressions += args # type: ignore # bool workaround - self.project(projection_model) - self.set_session(session=session) - self.ignore_cache = ignore_cache - self.fetch_links = fetch_links - self.pymongo_kwargs.update(pymongo_kwargs) - self.nesting_depth = nesting_depth - self.nesting_depths_per_field = nesting_depths_per_field - return self - - def update( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ): - """ - Create Update with modifications query - and provide search criteria there - - :param args: *Mapping[str,Any] - the modifications to apply. - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - :param response_type: Optional[UpdateResponse] - :return: UpdateMany query - """ - self.set_session(session) - return ( - self.UpdateQueryType( - document_model=self.document_model, - find_query=self.get_filter_query(), - ) - .update( - *args, - bulk_writer=bulk_writer, - response_type=response_type, - **pymongo_kwargs, - ) - .set_session(session=self.session) - ) - - def upsert( - self, - *args: Mapping[str, Any], - on_insert: "DocType", - session: Optional[AsyncClientSession] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ): - """ - Create Update with modifications query - and provide search criteria there - - :param args: *Mapping[str,Any] - the modifications to apply. 
- :param on_insert: DocType - document to insert if there is no matched - document in the collection - :param session: Optional[AsyncClientSession] - pymongo session - :param response_type: Optional[UpdateResponse] - :return: UpdateMany query - """ - self.set_session(session) - return ( - self.UpdateQueryType( - document_model=self.document_model, - find_query=self.get_filter_query(), - ) - .upsert( - *args, - on_insert=on_insert, - response_type=response_type, - **pymongo_kwargs, - ) - .set_session(session=self.session) - ) - - def update_one( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ) -> UpdateOne: - """ - Create [UpdateOne](query.md#updateone) query using modifications and - provide search criteria there - :param args: *Mapping[str,Any] - the modifications to apply - :param session: Optional[AsyncClientSession] - pymongo session - :param response_type: Optional[UpdateResponse] - :return: [UpdateOne](query.md#updateone) query - """ - return cast( - UpdateOne, - self.update( - *args, - session=session, - bulk_writer=bulk_writer, - response_type=response_type, - **pymongo_kwargs, - ), - ) - - def delete_one( - self, - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> DeleteOne: - """ - Provide search criteria to the [DeleteOne](query.md#deleteone) query - :param session: Optional[AsyncClientSession] - pymongo session - :return: [DeleteOne](query.md#deleteone) query - """ - # We need to cast here to tell mypy that we are sure about the type. - # This is because delete may also return a DeleteOne type in general, and mypy can not be sure in this case - # See https://mypy.readthedocs.io/en/stable/common_issues.html#narrowing-and-inner-functions - return cast( - DeleteOne, - self.delete( - session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ), - ) - - async def replace_one( - self, - document: "DocType", - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - ) -> Optional[UpdateResult]: - """ - Replace found document by provided - :param document: Document - document, which will replace the found one - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - Beanie bulk writer - :return: UpdateResult - """ - self.set_session(session=session) - if bulk_writer is None: - result: UpdateResult = ( - await self.document_model.get_pymongo_collection().replace_one( - self.get_filter_query(), - get_dict( - document, - to_db=True, - exclude={"_id"}, - keep_nulls=document.get_settings().keep_nulls, - ), - session=self.session, - ) - ) - - if not result.raw_result["updatedExisting"]: - raise DocumentNotFound - return result - else: - bulk_writer.add_operation( - self.document_model, - ReplaceOne( - self.get_filter_query(), - get_dict( - document, - to_db=True, - exclude={"_id"}, - keep_nulls=document.get_settings().keep_nulls, - ), - **self.pymongo_kwargs, - ), - ) - return None - - async def _find_one(self): - if self.fetch_links: - return await self.document_model.find_many( - *self.find_expressions, - session=self.session, - fetch_links=self.fetch_links, - projection_model=self.projection_model, - nesting_depth=self.nesting_depth, - nesting_depths_per_field=self.nesting_depths_per_field, - **self.pymongo_kwargs, - ).first_or_none() - return await 
self.document_model.get_pymongo_collection().find_one( - filter=self.get_filter_query(), - projection=get_projection(self.projection_model), - session=self.session, - **self.pymongo_kwargs, - ) - - def __await__( - self, - ) -> Generator[Coroutine, Any, Optional[FindQueryResultType]]: - """ - Run the query - :return: BaseModel - """ - # projection = get_projection(self.projection_model) - if ( - self.document_model.get_settings().use_cache - and self.ignore_cache is False - ): - cache_key = LRUCache.create_key( - "FindOne", - self.get_filter_query(), - self.projection_model, - self.session, - self.fetch_links, - ) - document: Dict[str, Any] = self.document_model._cache.get( # type: ignore - cache_key - ) - if document is None: - document = yield from self._find_one().__await__() # type: ignore - self.document_model._cache.set(cache_key, document) # type: ignore - else: - document = yield from self._find_one().__await__() # type: ignore - if document is None: - return None - if type(document) is self.projection_model: - return cast(FindQueryResultType, document) - return cast( - FindQueryResultType, parse_obj(self.projection_model, document) - ) - - async def count(self) -> int: - """ - Count the number of documents matching the query - :return: int - """ - if self.fetch_links: - return await self.document_model.find_many( - *self.find_expressions, - session=self.session, - fetch_links=self.fetch_links, - **self.pymongo_kwargs, - ).count() - return await super(FindOne, self).count() diff --git a/venv/lib/python3.12/site-packages/beanie/odm/queries/update.py b/venv/lib/python3.12/site-packages/beanie/odm/queries/update.py deleted file mode 100644 index f48e21ba..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/queries/update.py +++ /dev/null @@ -1,366 +0,0 @@ -from abc import abstractmethod -from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Mapping, - Optional, - Type, - Union, -) - -from pymongo import ReturnDocument -from pymongo import UpdateMany as UpdateManyPyMongo -from pymongo import UpdateOne as UpdateOnePyMongo -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.results import InsertOneResult, UpdateResult - -from beanie.odm.bulk import BulkWriter -from beanie.odm.interfaces.clone import CloneInterface -from beanie.odm.interfaces.session import SessionMethods -from beanie.odm.interfaces.update import ( - UpdateMethods, -) -from beanie.odm.operators.update import BaseUpdateOperator -from beanie.odm.operators.update.general import SetRevisionId -from beanie.odm.utils.encoder import Encoder -from beanie.odm.utils.parsing import parse_obj - -if TYPE_CHECKING: - from beanie.odm.documents import DocType - - -class UpdateResponse(str, Enum): - UPDATE_RESULT = "UPDATE_RESULT" # PyMongo update result - OLD_DOCUMENT = "OLD_DOCUMENT" # Original document - NEW_DOCUMENT = "NEW_DOCUMENT" # Updated document - - -class UpdateQuery(UpdateMethods, SessionMethods, CloneInterface): - """ - Update Query base class - """ - - def __init__( - self, - document_model: Type["DocType"], - find_query: Mapping[str, Any], - ): - self.document_model = document_model - self.find_query = find_query - self.update_expressions: List[Mapping[str, Any]] = [] - self.session = None - self.is_upsert = False - self.upsert_insert_doc: Optional["DocType"] = None - self.encoders: Dict[Any, Callable[[Any], Any]] = {} - self.bulk_writer: Optional[BulkWriter] = None - self.encoders = 
self.document_model.get_settings().bson_encoders - self.pymongo_kwargs: Dict[str, Any] = {} - - @property - def update_query(self) -> Dict[str, Any]: - query: Union[Dict[str, Any], List[Dict[str, Any]], None] = None - for expression in self.update_expressions: - if isinstance(expression, BaseUpdateOperator): - if query is None: - query = {} - if isinstance(query, list): - raise TypeError("Wrong expression type") - query.update(expression.query) - elif isinstance(expression, dict): - if query is None: - query = {} - if isinstance(query, list): - raise TypeError("Wrong expression type") - query.update(expression) - elif isinstance(expression, SetRevisionId): - if query is None: - query = {} - if isinstance(query, list): - raise TypeError("Wrong expression type") - set_query = query.get("$set", {}) - set_query.update(expression.query.get("$set", {})) - query["$set"] = set_query - elif isinstance(expression, list): - if query is None: - query = [] - if isinstance(query, dict): - raise TypeError("Wrong expression type") - query.extend(expression) - else: - raise TypeError("Wrong expression type") - return Encoder(custom_encoders=self.encoders).encode(query) - - @abstractmethod - async def _update(self) -> UpdateResult: ... - - -class UpdateMany(UpdateQuery): - """ - Update Many query class - """ - - def update( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ) -> "UpdateQuery": - """ - Provide modifications to the update query. - - :param args: *Union[dict, Mapping] - the modifications to apply. - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - :param pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - self.set_session(session=session) - self.update_expressions += args - if bulk_writer: - self.bulk_writer = bulk_writer - self.pymongo_kwargs.update(pymongo_kwargs) - return self - - def upsert( - self, - *args: Mapping[str, Any], - on_insert: "DocType", - session: Optional[AsyncClientSession] = None, - **pymongo_kwargs: Any, - ) -> "UpdateQuery": - """ - Provide modifications to the upsert query. - - :param args: *Union[dict, Mapping] - the modifications to apply. - :param on_insert: DocType - document to insert if there is no matched - document in the collection - :param session: Optional[AsyncClientSession] - pymongo session - :param **pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - self.upsert_insert_doc = on_insert # type: ignore - self.update(*args, session=session, **pymongo_kwargs) - return self - - def update_many( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - **pymongo_kwargs: Any, - ): - """ - Provide modifications to the update query - - :param args: *Union[dict, Mapping] - the modifications to apply. 
- :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - return self.update( - *args, session=session, bulk_writer=bulk_writer, **pymongo_kwargs - ) - - async def _update(self): - if self.bulk_writer is None: - return ( - await self.document_model.get_pymongo_collection().update_many( - self.find_query, - self.update_query, - session=self.session, - **self.pymongo_kwargs, - ) - ) - else: - self.bulk_writer.add_operation( - self.document_model, - UpdateManyPyMongo( - self.find_query, self.update_query, **self.pymongo_kwargs - ), - ) - - def __await__( - self, - ) -> Generator[ - Any, None, Union[UpdateResult, InsertOneResult, Optional["DocType"]] - ]: - """ - Run the query - :return: - """ - - update_result = yield from self._update().__await__() - if self.upsert_insert_doc is None: - return update_result - - if update_result is not None and update_result.matched_count == 0: - return ( - yield from self.document_model.insert_one( - document=self.upsert_insert_doc, - session=self.session, - bulk_writer=self.bulk_writer, - ).__await__() - ) - - return update_result - - -class UpdateOne(UpdateQuery): - """ - Update One query class - """ - - def __init__(self, *args: Any, **kwargs: Any): - super(UpdateOne, self).__init__(*args, **kwargs) - self.response_type = UpdateResponse.UPDATE_RESULT - - def update( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ) -> "UpdateQuery": - """ - Provide modifications to the update query. - - :param args: *Union[dict, Mapping] - the modifications to apply. - :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: Optional[BulkWriter] - :param response_type: UpdateResponse - :param pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - self.set_session(session=session) - self.update_expressions += args - if response_type is not None: - self.response_type = response_type - if bulk_writer: - self.bulk_writer = bulk_writer - self.pymongo_kwargs.update(pymongo_kwargs) - return self - - def upsert( - self, - *args: Mapping[str, Any], - on_insert: "DocType", - session: Optional[AsyncClientSession] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ) -> "UpdateQuery": - """ - Provide modifications to the upsert query. - - :param args: *Union[dict, Mapping] - the modifications to apply. - :param on_insert: DocType - document to insert if there is no matched - document in the collection - :param session: Optional[AsyncClientSession] - pymongo session - :param response_type: Optional[UpdateResponse] - :param pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - self.upsert_insert_doc = on_insert # type: ignore - self.update( - *args, - response_type=response_type, - session=session, - **pymongo_kwargs, - ) - return self - - def update_one( - self, - *args: Mapping[str, Any], - session: Optional[AsyncClientSession] = None, - bulk_writer: Optional[BulkWriter] = None, - response_type: Optional[UpdateResponse] = None, - **pymongo_kwargs: Any, - ): - """ - Provide modifications to the update query. The same as `update()` - - :param args: *Union[dict, Mapping] - the modifications to apply. 
- :param session: Optional[AsyncClientSession] - pymongo session - :param bulk_writer: "BulkWriter" - Beanie bulk writer - :param response_type: Optional[UpdateResponse] - :param pymongo_kwargs: pymongo native parameters for update operation - :return: UpdateMany query - """ - return self.update( - *args, - session=session, - bulk_writer=bulk_writer, - response_type=response_type, - **pymongo_kwargs, - ) - - async def _update(self): - if not self.bulk_writer: - if self.response_type == UpdateResponse.UPDATE_RESULT: - return await self.document_model.get_pymongo_collection().update_one( - self.find_query, - self.update_query, - session=self.session, - **self.pymongo_kwargs, - ) - else: - result = await self.document_model.get_pymongo_collection().find_one_and_update( - self.find_query, - self.update_query, - session=self.session, - return_document=( - ReturnDocument.BEFORE - if self.response_type == UpdateResponse.OLD_DOCUMENT - else ReturnDocument.AFTER - ), - **self.pymongo_kwargs, - ) - if result is not None: - result = parse_obj(self.document_model, result) - return result - else: - self.bulk_writer.add_operation( - self.document_model, - UpdateOnePyMongo( - self.find_query, self.update_query, **self.pymongo_kwargs - ), - ) - - def __await__( - self, - ) -> Generator[ - Any, None, Union[UpdateResult, InsertOneResult, Optional["DocType"]] - ]: - """ - Run the query - :return: - """ - update_result = yield from self._update().__await__() - if self.upsert_insert_doc is None: - return update_result - - if ( - self.response_type == UpdateResponse.UPDATE_RESULT - and update_result is not None - and update_result.matched_count == 0 - ) or ( - self.response_type != UpdateResponse.UPDATE_RESULT - and update_result is None - ): - return ( - yield from self.document_model.insert_one( - document=self.upsert_insert_doc, - session=self.session, - bulk_writer=self.bulk_writer, - ).__await__() - ) - - return update_result diff --git a/venv/lib/python3.12/site-packages/beanie/odm/registry.py b/venv/lib/python3.12/site-packages/beanie/odm/registry.py deleted file mode 100644 index 25c4193c..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/registry.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Dict, ForwardRef, Type, Union - -from pydantic import BaseModel - - -class DocsRegistry: - _registry: Dict[str, Type[BaseModel]] = {} - - @classmethod - def register(cls, name: str, doc_type: Type[BaseModel]): - cls._registry[name] = doc_type - - @classmethod - def get(cls, name: str) -> Type[BaseModel]: - return cls._registry[name] - - @classmethod - def evaluate_fr(cls, forward_ref: Union[ForwardRef, Type]): - """ - Evaluate forward ref - - :param forward_ref: ForwardRef - forward ref to evaluate - :return: Type[BaseModel] - class of the forward ref - """ - if ( - isinstance(forward_ref, ForwardRef) - and forward_ref.__forward_arg__ in cls._registry - ): - return cls._registry[forward_ref.__forward_arg__] - else: - return forward_ref diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index bc5a2762..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/__init__.cpython-312.pyc and /dev/null differ 
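
For context on the update machinery being removed above: the deleted update.py wires upsert() so that the on_insert document is only written when the update matched nothing, and response_type switches UpdateOne between returning a raw UpdateResult and the old or new parsed document (via find_one_and_update with ReturnDocument.BEFORE/AFTER). A minimal usage sketch, assuming a hypothetical Product model; the find_one(...).upsert(...) chain and UpdateResponse come from the deleted sources:

    # Usage sketch against the deleted beanie sources above.
    # "Product", its fields, and bump_price() are hypothetical.
    from beanie import Document
    from beanie.odm.queries.update import UpdateResponse

    class Product(Document):
        name: str
        price: float

    async def bump_price() -> "Product":
        # If a document matches, $inc runs and, with NEW_DOCUMENT, the
        # updated document is fetched and parsed back into Product.
        # If nothing matches (matched_count == 0, or a None result for
        # the document response types), the on_insert document is
        # inserted instead and returned by __await__.
        return await Product.find_one(Product.name == "tea").upsert(
            {"$inc": {"price": 1.0}},
            on_insert=Product(name="tea", price=10.0),
            response_type=UpdateResponse.NEW_DOCUMENT,
        )
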
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 88fce6b5..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/document.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/document.cpython-312.pyc deleted file mode 100644 index d40a353e..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/document.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/timeseries.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/timeseries.cpython-312.pyc deleted file mode 100644 index 7a8e9e8a..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/timeseries.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/union_doc.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/union_doc.cpython-312.pyc deleted file mode 100644 index 7200a035..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/union_doc.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/view.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/view.cpython-312.pyc deleted file mode 100644 index a50f1836..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/settings/__pycache__/view.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/base.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/base.py deleted file mode 100644 index 279e0c1c..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/settings/base.py +++ /dev/null @@ -1,39 +0,0 @@ -from datetime import timedelta -from typing import Any, Dict, Optional, Type - -from pydantic import BaseModel, Field -from pymongo.asynchronous.collection import AsyncCollection -from pymongo.asynchronous.database import AsyncDatabase - -from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 - -if IS_PYDANTIC_V2: - from pydantic import ConfigDict - - -class ItemSettings(BaseModel): - name: Optional[str] = None - - use_cache: bool = False - cache_capacity: int = 32 - cache_expiration_time: timedelta = timedelta(minutes=10) - bson_encoders: Dict[Any, Any] = Field(default_factory=dict) - projection: Optional[Dict[str, Any]] = None - - pymongo_db: Optional[AsyncDatabase] = None - pymongo_collection: Optional[AsyncCollection] = None - - union_doc: Optional[Type] = None - union_doc_alias: Optional[str] = None - class_id: str = "_class_id" - - is_root: bool = False - - if IS_PYDANTIC_V2: - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) - else: - - class Config: - arbitrary_types_allowed = True diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/document.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/document.py deleted file mode 100644 index b006bdea..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/settings/document.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import List, Optional - -from pydantic import Field - -from beanie.odm.fields 
import IndexModelField -from beanie.odm.settings.base import ItemSettings -from beanie.odm.settings.timeseries import TimeSeriesConfig -from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 - -if IS_PYDANTIC_V2: - from pydantic import ConfigDict - - -class DocumentSettings(ItemSettings): - use_state_management: bool = False - state_management_replace_objects: bool = False - state_management_save_previous: bool = False - validate_on_save: bool = False - use_revision: bool = False - single_root_inheritance: bool = False - - indexes: List[IndexModelField] = Field(default_factory=list) - merge_indexes: bool = False - timeseries: Optional[TimeSeriesConfig] = None - - lazy_parsing: bool = False - - keep_nulls: bool = True - - max_nesting_depths_per_field: dict = Field(default_factory=dict) - max_nesting_depth: int = 3 - - if IS_PYDANTIC_V2: - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) - else: - - class Config: - arbitrary_types_allowed = True diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/timeseries.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/timeseries.py deleted file mode 100644 index 8fb6329f..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/settings/timeseries.py +++ /dev/null @@ -1,52 +0,0 @@ -from enum import Enum -from typing import Annotated, Any, Dict, Optional - -from pydantic import BaseModel, Field - - -class Granularity(str, Enum): - """ - Time Series Granuality - """ - - seconds = "seconds" - minutes = "minutes" - hours = "hours" - - -class TimeSeriesConfig(BaseModel): - """ - Time Series Collection config - """ - - time_field: str - meta_field: Optional[str] = None - granularity: Optional[Granularity] = None - bucket_max_span_seconds: Optional[int] = None - bucket_rounding_second: Annotated[ - Optional[int], - Field( - deprecated="This field is deprecated in favor of " - "'bucket_rounding_seconds'.", - ), - ] = None - bucket_rounding_seconds: Optional[int] = None - expire_after_seconds: Optional[int] = None - - def build_query(self, collection_name: str) -> Dict[str, Any]: - res: Dict[str, Any] = {"name": collection_name} - timeseries: Dict[str, Any] = {"timeField": self.time_field} - if self.meta_field is not None: - timeseries["metaField"] = self.meta_field - if self.granularity is not None: - timeseries["granularity"] = self.granularity - if self.bucket_max_span_seconds is not None: - timeseries["bucketMaxSpanSeconds"] = self.bucket_max_span_seconds - if self.bucket_rounding_second is not None: # Deprecated field - timeseries["bucketRoundingSeconds"] = self.bucket_rounding_second - if self.bucket_rounding_seconds is not None: - timeseries["bucketRoundingSeconds"] = self.bucket_rounding_seconds - res["timeseries"] = timeseries - if self.expire_after_seconds is not None: - res["expireAfterSeconds"] = self.expire_after_seconds - return res diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/union_doc.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/union_doc.py deleted file mode 100644 index a86a18bf..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/settings/union_doc.py +++ /dev/null @@ -1,4 +0,0 @@ -from beanie.odm.settings.base import ItemSettings - - -class UnionDocSettings(ItemSettings): ... 
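
The settings modules deleted above define how a Document's inner Settings class is parsed (ItemSettings, DocumentSettings) and how TimeSeriesConfig.build_query() translates its fields into the createCollection options that the initializer passes to MongoDB. A small sketch of that mapping, with a made-up collection name and field names; the import path is the deleted module itself:

    # Sketch of TimeSeriesConfig.build_query() from the deleted
    # settings/timeseries.py. Collection and field names are invented.
    from beanie.odm.settings.timeseries import Granularity, TimeSeriesConfig

    config = TimeSeriesConfig(
        time_field="ts",
        meta_field="sensor_id",
        granularity=Granularity.minutes,
        expire_after_seconds=86400,
    )

    # Feeds database.create_collection(**query) during init. Roughly:
    # {'name': 'sensor_readings',
    #  'timeseries': {'timeField': 'ts', 'metaField': 'sensor_id',
    #                 'granularity': Granularity.minutes},
    #  'expireAfterSeconds': 86400}
    query = config.build_query("sensor_readings")
    print(query)

Note that the deprecated bucket_rounding_second field is still honored by build_query but is overwritten by bucket_rounding_seconds when both are set, so new settings should use the plural form.
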
diff --git a/venv/lib/python3.12/site-packages/beanie/odm/settings/view.py b/venv/lib/python3.12/site-packages/beanie/odm/settings/view.py deleted file mode 100644 index 54319b0a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/settings/view.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Any, Dict, List, Type, Union - -from pydantic import Field - -from beanie.odm.settings.base import ItemSettings - - -class ViewSettings(ItemSettings): - source: Union[str, Type] - pipeline: List[Dict[str, Any]] - - max_nesting_depths_per_field: dict = Field(default_factory=dict) - max_nesting_depth: int = 3 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/union_doc.py b/venv/lib/python3.12/site-packages/beanie/odm/union_doc.py deleted file mode 100644 index 6da87364..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/union_doc.py +++ /dev/null @@ -1,81 +0,0 @@ -from typing import Any, ClassVar, Dict, Optional, Type, TypeVar - -from pymongo.asynchronous.client_session import AsyncClientSession - -from beanie.exceptions import UnionDocNotInited -from beanie.odm.bulk import BulkWriter -from beanie.odm.interfaces.aggregate import AggregateInterface -from beanie.odm.interfaces.detector import DetectionInterface, ModelType -from beanie.odm.interfaces.find import FindInterface -from beanie.odm.interfaces.getters import OtherGettersInterface -from beanie.odm.settings.union_doc import UnionDocSettings - -UnionDocType = TypeVar("UnionDocType", bound="UnionDoc") - - -class UnionDoc( - FindInterface, - AggregateInterface, - OtherGettersInterface, - DetectionInterface, -): - _document_models: ClassVar[Optional[Dict[str, Type]]] = None - _is_inited: ClassVar[bool] = False - _settings: ClassVar[UnionDocSettings] - - @classmethod - def get_settings(cls) -> UnionDocSettings: - return cls._settings - - @classmethod - def register_doc(cls, name: str, doc_model: Type): - if cls._document_models is None: - cls._document_models = {} - - if cls._is_inited is False: - raise UnionDocNotInited - - cls._document_models[name] = doc_model - return cls.get_settings().name - - @classmethod - def get_model_type(cls) -> ModelType: - return ModelType.UnionDoc - - @classmethod - def bulk_writer( - cls, - session: Optional[AsyncClientSession] = None, - ordered: bool = True, - bypass_document_validation: bool = False, - comment: Optional[Any] = None, - ) -> BulkWriter: - """ - Returns a BulkWriter instance for handling bulk write operations. - - :param session: Optional[AsyncClientSession] - pymongo session. - The session instance used for transactional operations. - :param ordered: bool - If ``True`` (the default), requests will be performed on the server serially, in the order provided. If an error - occurs, all remaining operations are aborted. If ``False``, requests will be performed on the server in - arbitrary order, possibly in parallel, and all operations will be attempted. - :param bypass_document_validation: bool, optional - If ``True``, allows the write to opt-out of document-level validation. Default is ``False``. - :param comment: str, optional - A user-provided comment to attach to the BulkWriter. - - :returns: BulkWriter - An instance of BulkWriter configured with the provided settings. - - Example Usage: - -------------- - This method is typically used within an asynchronous context manager. - - .. 
code-block:: python - - async with Document.bulk_writer(ordered=True) as bulk: - await Document.insert_one(Document(field="value"), bulk_writer=bulk) - """ - return BulkWriter( - session, ordered, cls, bypass_document_validation, comment - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__init__.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 990b844d..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/dump.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/dump.cpython-312.pyc deleted file mode 100644 index b0d73d4e..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/dump.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/encoder.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/encoder.cpython-312.pyc deleted file mode 100644 index 11a6e4df..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/encoder.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/find.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/find.cpython-312.pyc deleted file mode 100644 index f08b5e7a..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/find.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/general.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/general.cpython-312.pyc deleted file mode 100644 index 2a69f2bb..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/general.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/init.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/init.cpython-312.pyc deleted file mode 100644 index 1e034ee0..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/init.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/parsing.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/parsing.cpython-312.pyc deleted file mode 100644 index 75f6c924..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/parsing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/projection.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/projection.cpython-312.pyc deleted file mode 100644 index 0ad36ec7..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/projection.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/pydantic.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/pydantic.cpython-312.pyc deleted file mode 100644 index 5b7d7db5..00000000 
Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/pydantic.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/relations.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/relations.cpython-312.pyc deleted file mode 100644 index 83269d87..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/relations.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/self_validation.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/self_validation.cpython-312.pyc deleted file mode 100644 index 6b1bb781..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/self_validation.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/state.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/state.cpython-312.pyc deleted file mode 100644 index 148171c6..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/state.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/typing.cpython-312.pyc b/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/typing.cpython-312.pyc deleted file mode 100644 index dab3cb05..00000000 Binary files a/venv/lib/python3.12/site-packages/beanie/odm/utils/__pycache__/typing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/dump.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/dump.py deleted file mode 100644 index a030fc9a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/dump.py +++ /dev/null @@ -1,50 +0,0 @@ -from typing import TYPE_CHECKING, Optional, Set - -from beanie.odm.utils.encoder import Encoder - -if TYPE_CHECKING: - from beanie.odm.documents import Document - - -def get_dict( - document: "Document", - to_db: bool = False, - exclude: Optional[Set[str]] = None, - keep_nulls: bool = True, -): - if exclude is None: - exclude = set() - if document.id is None: - exclude.add("_id") - if not document.get_settings().use_revision: - exclude.add("revision_id") - encoder = Encoder(exclude=exclude, to_db=to_db, keep_nulls=keep_nulls) - return encoder.encode(document) - - -def get_nulls( - document: "Document", - exclude: Optional[Set[str]] = None, -): - dictionary = get_dict(document, exclude=exclude, keep_nulls=True) - return filter_none(dictionary) - - -def get_top_level_nones( - document: "Document", - exclude: Optional[Set[str]] = None, -): - dictionary = get_dict(document, exclude=exclude, keep_nulls=True) - return {k: v for k, v in dictionary.items() if v is None} - - -def filter_none(d): - result = {} - for k, v in d.items(): - if isinstance(v, dict): - filtered = filter_none(v) - if filtered: - result[k] = filtered - elif v is None: - result[k] = v - return result diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/encoder.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/encoder.py deleted file mode 100644 index d8ab3fd5..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/encoder.py +++ /dev/null @@ -1,176 +0,0 @@ -import dataclasses as dc -import datetime -import decimal -import enum -import ipaddress -import operator -import pathlib -import re -import uuid -from enum import Enum -from typing import ( - Any, - 
Callable, - Container, - Iterable, - Mapping, - MutableMapping, - Optional, - Tuple, -) - -import bson -import pydantic - -import beanie -from beanie.odm.fields import Link, LinkTypes -from beanie.odm.utils.pydantic import ( - IS_PYDANTIC_V2, - IS_PYDANTIC_V2_10, - get_model_fields, -) - -SingleArgCallable = Callable[[Any], Any] -DEFAULT_CUSTOM_ENCODERS: MutableMapping[type, SingleArgCallable] = { - ipaddress.IPv4Address: str, - ipaddress.IPv4Interface: str, - ipaddress.IPv4Network: str, - ipaddress.IPv6Address: str, - ipaddress.IPv6Interface: str, - ipaddress.IPv6Network: str, - pathlib.PurePath: str, - pydantic.SecretBytes: pydantic.SecretBytes.get_secret_value, - pydantic.SecretStr: pydantic.SecretStr.get_secret_value, - datetime.date: lambda d: datetime.datetime.combine(d, datetime.time.min), - datetime.timedelta: operator.methodcaller("total_seconds"), - enum.Enum: operator.attrgetter("value"), - Link: operator.attrgetter("ref"), - bytes: bson.Binary, - decimal.Decimal: bson.Decimal128, - uuid.UUID: bson.Binary.from_uuid, - re.Pattern: bson.Regex.from_native, -} -if IS_PYDANTIC_V2: - from pydantic_core import Url - - DEFAULT_CUSTOM_ENCODERS[Url] = str - -if IS_PYDANTIC_V2_10: - from pydantic import AnyUrl - - DEFAULT_CUSTOM_ENCODERS[AnyUrl] = str - -BSON_SCALAR_TYPES = ( - type(None), - str, - int, - float, - datetime.datetime, - bson.Binary, - bson.DBRef, - bson.Decimal128, - bson.MaxKey, - bson.MinKey, - bson.ObjectId, - bson.Regex, -) - - -@dc.dataclass -class Encoder: - """ - BSON encoding class - """ - - exclude: Container[str] = frozenset() - custom_encoders: Mapping[type, SingleArgCallable] = dc.field( - default_factory=dict - ) - to_db: bool = False - keep_nulls: bool = True - - def _encode_document(self, obj: "beanie.Document") -> Mapping[str, Any]: - obj.parse_store() - settings = obj.get_settings() - obj_dict = {} - if settings.union_doc is not None: - obj_dict[settings.class_id] = ( - settings.union_doc_alias or obj.__class__.__name__ - ) - if obj._class_id: - obj_dict[settings.class_id] = obj._class_id - - link_fields = obj.get_link_fields() or {} - sub_encoder = Encoder( - # don't propagate self.exclude to subdocuments - custom_encoders=settings.bson_encoders, - to_db=self.to_db, - keep_nulls=self.keep_nulls, - ) - for key, value in self._iter_model_items(obj): - if key in link_fields: - link_type = link_fields[key].link_type - if link_type in (LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT): - if value is not None: - value = value.to_ref() - elif link_type in (LinkTypes.LIST, LinkTypes.OPTIONAL_LIST): - if value is not None: - value = [link.to_ref() for link in value] - elif self.to_db: - continue - obj_dict[key] = sub_encoder.encode(value) - return obj_dict - - def encode(self, obj: Any) -> Any: - if self.custom_encoders: - encoder = _get_encoder(obj, self.custom_encoders) - if encoder is not None: - return encoder(obj) - - if isinstance(obj, BSON_SCALAR_TYPES): - return obj - - encoder = _get_encoder(obj, DEFAULT_CUSTOM_ENCODERS) - if encoder is not None: - return encoder(obj) - - if isinstance(obj, beanie.Document): - return self._encode_document(obj) - if IS_PYDANTIC_V2 and isinstance(obj, pydantic.RootModel): - return self.encode(obj.root) - if isinstance(obj, pydantic.BaseModel): - items = self._iter_model_items(obj) - return {key: self.encode(value) for key, value in items} - if isinstance(obj, Mapping): - return { - key if isinstance(key, Enum) else str(key): self.encode(value) - for key, value in obj.items() - } - if isinstance(obj, Iterable): - return 
[self.encode(value) for value in obj] - - raise ValueError(f"Cannot encode {obj!r}") - - def _iter_model_items( - self, obj: pydantic.BaseModel - ) -> Iterable[Tuple[str, Any]]: - exclude, keep_nulls = self.exclude, self.keep_nulls - get_model_field = get_model_fields(obj).get - for key, value in obj.__iter__(): - field_info = get_model_field(key) - if field_info is not None: - key = field_info.alias or key - if key not in exclude and (value is not None or keep_nulls): - yield key, value - - -def _get_encoder( - obj: Any, custom_encoders: Mapping[type, SingleArgCallable] -) -> Optional[SingleArgCallable]: - encoder = custom_encoders.get(type(obj)) - if encoder is not None: - return encoder - for cls, encoder in custom_encoders.items(): - if isinstance(obj, cls): - return encoder - return None diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/find.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/find.py deleted file mode 100644 index 89dfb763..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/find.py +++ /dev/null @@ -1,403 +0,0 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type - -from beanie.odm.fields import LinkInfo, LinkTypes - -if TYPE_CHECKING: - from beanie import Document - - -# TODO: check if this is the most efficient way for -# appending subqueries to the queries var - - -def construct_lookup_queries( - cls: Type["Document"], - nesting_depth: Optional[int] = None, - nesting_depths_per_field: Optional[Dict[str, int]] = None, -) -> List[Dict[str, Any]]: - queries: List = [] - link_fields = cls.get_link_fields() - if link_fields is not None: - for link_info in link_fields.values(): - final_nesting_depth = ( - nesting_depths_per_field.get(link_info.field_name, None) - if nesting_depths_per_field is not None - else None - ) - if final_nesting_depth is None: - final_nesting_depth = nesting_depth - construct_query( - link_info=link_info, - queries=queries, - database_major_version=cls._database_major_version, - current_depth=final_nesting_depth, - ) - return queries - - -def construct_query( - link_info: LinkInfo, - queries: List, - database_major_version: int, - current_depth: Optional[int] = None, -): - if link_info.is_fetchable is False or ( - current_depth is not None and current_depth <= 0 - ): - return - if link_info.link_type in [ - LinkTypes.DIRECT, - LinkTypes.OPTIONAL_DIRECT, - ]: - if database_major_version >= 5 or link_info.nested_links is None: - lookup_steps = [ - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "localField": f"{link_info.lookup_field_name}.$id", - "foreignField": "_id", - "as": f"_link_{link_info.field_name}", - } - }, - { - "$unwind": { - "path": f"$_link_{link_info.field_name}", - "preserveNullAndEmptyArrays": True, - } - }, - { - "$addFields": { - link_info.field_name: { - "$cond": { - "if": { - "$ifNull": [ - f"$_link_{link_info.field_name}", - False, - ] - }, - "then": f"$_link_{link_info.field_name}", - "else": f"${link_info.field_name}", - } - } - } - }, - {"$project": {f"_link_{link_info.field_name}": 0}}, - ] # type: ignore - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - if link_info.nested_links is not None: - lookup_steps[0]["$lookup"]["pipeline"] = [] # type: ignore - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore - database_major_version=database_major_version, - 
current_depth=new_depth, - ) - queries += lookup_steps - - else: - lookup_steps = [ - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "let": { - "link_id": f"${link_info.lookup_field_name}.$id" - }, - "as": f"_link_{link_info.field_name}", - "pipeline": [ - { - "$match": { - "$expr": {"$eq": ["$_id", "$$link_id"]} - } - }, - ], - } - }, - { - "$unwind": { - "path": f"$_link_{link_info.field_name}", - "preserveNullAndEmptyArrays": True, - } - }, - { - "$addFields": { - link_info.field_name: { - "$cond": { - "if": { - "$ifNull": [ - f"$_link_{link_info.field_name}", - False, - ] - }, - "then": f"$_link_{link_info.field_name}", - "else": f"${link_info.field_name}", - } - } - } - }, - {"$project": {f"_link_{link_info.field_name}": 0}}, - ] - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore - database_major_version=database_major_version, - current_depth=new_depth, - ) - queries += lookup_steps - - elif link_info.link_type in [ - LinkTypes.BACK_DIRECT, - LinkTypes.OPTIONAL_BACK_DIRECT, - ]: - if database_major_version >= 5 or link_info.nested_links is None: - lookup_steps = [ - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "localField": "_id", - "foreignField": f"{link_info.lookup_field_name}.$id", - "as": f"_link_{link_info.field_name}", - } - }, - { - "$unwind": { - "path": f"$_link_{link_info.field_name}", - "preserveNullAndEmptyArrays": True, - } - }, - { - "$addFields": { - link_info.field_name: { - "$cond": { - "if": { - "$ifNull": [ - f"$_link_{link_info.field_name}", - False, - ] - }, - "then": f"$_link_{link_info.field_name}", - "else": f"${link_info.field_name}", - } - } - } - }, - {"$project": {f"_link_{link_info.field_name}": 0}}, - ] # type: ignore - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - if link_info.nested_links is not None: - lookup_steps[0]["$lookup"]["pipeline"] = [] # type: ignore - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore - database_major_version=database_major_version, - current_depth=new_depth, - ) - queries += lookup_steps - - else: - lookup_steps = [ - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "let": {"link_id": "$_id"}, - "as": f"_link_{link_info.field_name}", - "pipeline": [ - { - "$match": { - "$expr": { - "$eq": [ - f"${link_info.lookup_field_name}.$id", - "$$link_id", - ] - } - } - }, - ], - } - }, - { - "$unwind": { - "path": f"$_link_{link_info.field_name}", - "preserveNullAndEmptyArrays": True, - } - }, - { - "$addFields": { - link_info.field_name: { - "$cond": { - "if": { - "$ifNull": [ - f"$_link_{link_info.field_name}", - False, - ] - }, - "then": f"$_link_{link_info.field_name}", - "else": f"${link_info.field_name}", - } - } - } - }, - {"$project": {f"_link_{link_info.field_name}": 0}}, - ] - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore - database_major_version=database_major_version, - current_depth=new_depth, - ) - 
queries += lookup_steps - - elif link_info.link_type in [ - LinkTypes.LIST, - LinkTypes.OPTIONAL_LIST, - ]: - if database_major_version >= 5 or link_info.nested_links is None: - queries.append( - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "localField": f"{link_info.lookup_field_name}.$id", - "foreignField": "_id", - "as": link_info.field_name, - } - } - ) - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - if link_info.nested_links is not None: - queries[-1]["$lookup"]["pipeline"] = [] - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=queries[-1]["$lookup"]["pipeline"], - database_major_version=database_major_version, - current_depth=new_depth, - ) - else: - lookup_step = { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "let": {"link_id": f"${link_info.lookup_field_name}.$id"}, - "as": link_info.field_name, - "pipeline": [ - {"$match": {"$expr": {"$in": ["$_id", "$$link_id"]}}}, - ], - } - } - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_step["$lookup"]["pipeline"], - database_major_version=database_major_version, - current_depth=new_depth, - ) - queries.append(lookup_step) - - elif link_info.link_type in [ - LinkTypes.BACK_LIST, - LinkTypes.OPTIONAL_BACK_LIST, - ]: - if database_major_version >= 5 or link_info.nested_links is None: - queries.append( - { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "localField": "_id", - "foreignField": f"{link_info.lookup_field_name}.$id", - "as": link_info.field_name, - } - } - ) - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - if link_info.nested_links is not None: - queries[-1]["$lookup"]["pipeline"] = [] - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=queries[-1]["$lookup"]["pipeline"], - database_major_version=database_major_version, - current_depth=new_depth, - ) - else: - lookup_step = { - "$lookup": { - "from": link_info.document_class.get_pymongo_collection().name, # type: ignore - "let": {"link_id": "$_id"}, - "as": link_info.field_name, - "pipeline": [ - { - "$match": { - "$expr": { - "$in": [ - "$$link_id", - f"${link_info.lookup_field_name}.$id", - ] - } - } - } - ], - } - } - new_depth = ( - current_depth - 1 if current_depth is not None else None - ) - for nested_link in link_info.nested_links: - construct_query( - link_info=link_info.nested_links[nested_link], - queries=lookup_step["$lookup"]["pipeline"], - database_major_version=database_major_version, - current_depth=new_depth, - ) - queries.append(lookup_step) - - return queries - - -def split_text_query( - query: Dict[str, Any], -) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]: - """Divide query into text and non-text matches - - :param query: Dict[str, Any] - query dict - :return: Tuple[Dict[str, Any], Dict[str, Any]] - text and non-text queries, - respectively - """ - - root_text_query_args: Dict[str, Any] = query.get("$text", None) - root_non_text_queries: Dict[str, Any] = { - k: v for k, v in query.items() if k not in {"$text", "$and"} - } - - text_queries: List[Dict[str, Any]] = ( - [{"$text": root_text_query_args}] if root_text_query_args else [] - ) 
- non_text_queries: List[Dict[str, Any]] = ( - [root_non_text_queries] if root_non_text_queries else [] - ) - - for match_case in query.get("$and", []): - if "$text" in match_case: - text_queries.append(match_case) - else: - non_text_queries.append(match_case) - - return text_queries, non_text_queries diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/general.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/general.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/init.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/init.py deleted file mode 100644 index bab6bf01..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/init.py +++ /dev/null @@ -1,787 +0,0 @@ -import sys - -from pymongo.asynchronous.database import AsyncDatabase -from typing_extensions import Sequence, get_args, get_origin - -from beanie.odm.utils.pydantic import ( - IS_PYDANTIC_V2, - get_extra_field_info, - get_model_fields, - parse_model, -) -from beanie.odm.utils.typing import get_index_attributes - -if sys.version_info >= (3, 10): - from types import UnionType as TypesUnionType -else: - TypesUnionType = () - -import importlib -import inspect -from typing import ( # type: ignore - List, - Optional, - Type, - Union, - _GenericAlias, -) - -from pydantic import BaseModel -from pydantic.fields import FieldInfo -from pymongo import AsyncMongoClient, IndexModel - -from beanie.exceptions import Deprecation, MongoDBVersionError -from beanie.odm.actions import ActionRegistry -from beanie.odm.cache import LRUCache -from beanie.odm.documents import DocType, Document -from beanie.odm.fields import ( - BackLink, - ExpressionField, - Link, - LinkInfo, - LinkTypes, -) -from beanie.odm.interfaces.detector import ModelType -from beanie.odm.registry import DocsRegistry -from beanie.odm.settings.document import DocumentSettings, IndexModelField -from beanie.odm.settings.union_doc import UnionDocSettings -from beanie.odm.settings.view import ViewSettings -from beanie.odm.union_doc import UnionDoc, UnionDocType -from beanie.odm.views import View - - -class Output(BaseModel): - class_name: str - collection_name: str - - -class Initializer: - def __init__( - self, - database: AsyncDatabase = None, - connection_string: Optional[str] = None, - document_models: Optional[ - Sequence[ - Union[Type["DocType"], Type["UnionDocType"], Type["View"], str] - ] - ] = None, - allow_index_dropping: bool = False, - recreate_views: bool = False, - skip_indexes: bool = False, - ): - """ - Beanie initializer - - :param database: AsyncDatabase - pymongo database instance - :param connection_string: str - MongoDB connection string - :param document_models: List[Union[Type[DocType], Type[UnionDocType], str]] - model classes - or strings with dot separated paths - :param allow_index_dropping: bool - if index dropping is allowed. - Default False - :param recreate_views: bool - if views should be recreated. Default False - it will patch the pymongo client to use process's event loop. - :param skip_indexes: bool - if you want to skip working with indexes. 
Default False - :return: None - """ - - self.inited_classes: List[Type] = [] - self.allow_index_dropping = allow_index_dropping - self.skip_indexes = skip_indexes - self.recreate_views = recreate_views - - self.models_with_updated_forward_refs: List[Type[BaseModel]] = [] - - if (connection_string is None and database is None) or ( - connection_string is not None and database is not None - ): - raise ValueError( - "connection_string parameter or database parameter must be set" - ) - - if document_models is None: - raise ValueError("document_models parameter must be set") - if connection_string is not None: - database = AsyncMongoClient( - connection_string - ).get_default_database() - - self.database: AsyncDatabase = database - - sort_order = { - ModelType.UnionDoc: 0, - ModelType.Document: 1, - ModelType.View: 2, - } - - self.document_models: List[ - Union[Type[DocType], Type[UnionDocType], Type[View]] - ] = [ - self.get_model(model) if isinstance(model, str) else model - for model in document_models - ] - - self.fill_docs_registry() - - self.document_models.sort( - key=lambda val: sort_order[val.get_model_type()] - ) - - def __await__(self): - for model in self.document_models: - yield from self.init_class(model).__await__() - - # General - def fill_docs_registry(self): - for model in self.document_models: - module = inspect.getmodule(model) - members = inspect.getmembers(module) - for name, obj in members: - if inspect.isclass(obj) and issubclass(obj, BaseModel): - DocsRegistry.register(name, obj) - - @staticmethod - def get_model(dot_path: str) -> Type["DocType"]: - """ - Get the model by the path in format bar.foo.Model - - :param dot_path: str - dot seprated path to the model - :return: Type[DocType] - class of the model - """ - module_name, class_name = None, None - try: - module_name, class_name = dot_path.rsplit(".", 1) - return getattr(importlib.import_module(module_name), class_name) - - except ValueError: - raise ValueError( - f"'{dot_path}' doesn't have '.' path, eg. path.to.your.model.class" - ) - - except AttributeError: - raise AttributeError( - f"module '{module_name}' has no class called '{class_name}'" - ) - - def init_settings( - self, cls: Union[Type[Document], Type[View], Type[UnionDoc]] - ): - """ - Init Settings - - :param cls: Union[Type[Document], Type[View], Type[UnionDoc]] - Class - to init settings - :return: None - """ - settings_class = getattr(cls, "Settings", None) - settings_vars = {} - if settings_class is not None: - # get all attributes of the Settings subclass (including inherited ones) - # without magic dunder methods - settings_vars = { - attr: getattr(settings_class, attr) - for attr in dir(settings_class) - if not attr.startswith("__") - } - if issubclass(cls, Document): - cls._document_settings = parse_model( - DocumentSettings, settings_vars - ) - if issubclass(cls, View): - cls._settings = parse_model(ViewSettings, settings_vars) - if issubclass(cls, UnionDoc): - cls._settings = parse_model(UnionDocSettings, settings_vars) - - if not IS_PYDANTIC_V2: - - def update_forward_refs(self, cls: Type[BaseModel]): - """ - Update forward refs - - :param cls: Type[BaseModel] - class to update forward refs - :return: None - """ - if cls not in self.models_with_updated_forward_refs: - cls.update_forward_refs() - self.models_with_updated_forward_refs.append(cls) - - # General. Relations - - def detect_link( - self, field: FieldInfo, field_name: str - ) -> Optional[LinkInfo]: - """ - It detects link and returns LinkInfo if any found. 
- - :param field: ModelField - :return: Optional[LinkInfo] - """ - - origin = get_origin(field.annotation) - args = get_args(field.annotation) - classes = [ - Link, - BackLink, - ] - - for cls in classes: - # Check if annotation is one of the custom classes - if ( - isinstance(field.annotation, _GenericAlias) - and field.annotation.__origin__ is cls - ): - if cls is Link: - return LinkInfo( - field_name=field_name, - lookup_field_name=field_name, - document_class=DocsRegistry.evaluate_fr(args[0]), # type: ignore - link_type=LinkTypes.DIRECT, - ) - if cls is BackLink: - return LinkInfo( - field_name=field_name, - lookup_field_name=get_extra_field_info( - field, "original_field" - ), # type: ignore - document_class=DocsRegistry.evaluate_fr(args[0]), # type: ignore - link_type=LinkTypes.BACK_DIRECT, - ) - - # Check if annotation is List[custom class] - elif ( - (origin is List or origin is list) - and len(args) == 1 - and isinstance(args[0], _GenericAlias) - and args[0].__origin__ is cls - ): - if cls is Link: - return LinkInfo( - field_name=field_name, - lookup_field_name=field_name, - document_class=DocsRegistry.evaluate_fr( - get_args(args[0])[0] - ), # type: ignore - link_type=LinkTypes.LIST, - ) - if cls is BackLink: - return LinkInfo( - field_name=field_name, - lookup_field_name=get_extra_field_info( # type: ignore - field, "original_field" - ), - document_class=DocsRegistry.evaluate_fr( - get_args(args[0])[0] - ), # type: ignore - link_type=LinkTypes.BACK_LIST, - ) - - # Check if annotation is Optional[custom class] or Optional[List[custom class]] - elif ( - (origin is Union or origin is TypesUnionType) - and len(args) == 2 - and type(None) in args - ): - if args[1] is type(None): - optional = args[0] - else: - optional = args[1] - optional_origin = get_origin(optional) - optional_args = get_args(optional) - - if ( - isinstance(optional, _GenericAlias) - and optional.__origin__ is cls - ): - if cls is Link: - return LinkInfo( - field_name=field_name, - lookup_field_name=field_name, - document_class=DocsRegistry.evaluate_fr( - optional_args[0] - ), # type: ignore - link_type=LinkTypes.OPTIONAL_DIRECT, - ) - if cls is BackLink: - return LinkInfo( - field_name=field_name, - lookup_field_name=get_extra_field_info( - field, "original_field" - ), - document_class=DocsRegistry.evaluate_fr( - optional_args[0] - ), # type: ignore - link_type=LinkTypes.OPTIONAL_BACK_DIRECT, - ) - - elif ( - (optional_origin is List or optional_origin is list) - and len(optional_args) == 1 - and isinstance(optional_args[0], _GenericAlias) - and optional_args[0].__origin__ is cls - ): - if cls is Link: - return LinkInfo( - field_name=field_name, - lookup_field_name=field_name, - document_class=DocsRegistry.evaluate_fr( - get_args(optional_args[0])[0] - ), # type: ignore - link_type=LinkTypes.OPTIONAL_LIST, - ) - if cls is BackLink: - return LinkInfo( - field_name=field_name, - lookup_field_name=get_extra_field_info( - field, "original_field" - ), - document_class=DocsRegistry.evaluate_fr( - get_args(optional_args[0])[0] - ), # type: ignore - link_type=LinkTypes.OPTIONAL_BACK_LIST, - ) - return None - - def check_nested_links(self, link_info: LinkInfo, current_depth: int): - if current_depth == 1: - return - for k, v in get_model_fields(link_info.document_class).items(): - nested_link_info = self.detect_link(v, k) - if nested_link_info is None: - continue - - if link_info.nested_links is None: - link_info.nested_links = {} - link_info.nested_links[k] = nested_link_info - new_depth = ( - current_depth - 1 if 
current_depth is not None else None - ) - self.check_nested_links(nested_link_info, current_depth=new_depth) - - # Document - - @staticmethod - def set_default_class_vars(cls: Type[Document]): - """ - Set default class variables. - - :param cls: Union[Type[Document], Type[View], Type[UnionDoc]] - Class - to init settings - :return: - """ - cls._children = dict() - cls._parent = None - cls._inheritance_inited = False - cls._class_id = None - cls._link_fields = None - - @staticmethod - def init_cache(cls) -> None: - """ - Init model's cache - :return: None - """ - if cls.get_settings().use_cache: - cls._cache = LRUCache( - capacity=cls.get_settings().cache_capacity, - expiration_time=cls.get_settings().cache_expiration_time, - ) - - def init_document_fields(self, cls) -> None: - """ - Init class fields - :return: None - """ - - if not IS_PYDANTIC_V2: - self.update_forward_refs(cls) - - if cls._link_fields is None: - cls._link_fields = {} - for k, v in get_model_fields(cls).items(): - path = v.alias or k - setattr(cls, k, ExpressionField(path)) - - link_info = self.detect_link(v, k) - depth_level = cls.get_settings().max_nesting_depths_per_field.get( - k, None - ) - if depth_level is None: - depth_level = cls.get_settings().max_nesting_depth - if link_info is not None: - if depth_level > 0 or depth_level is None: - cls._link_fields[k] = link_info - self.check_nested_links( - link_info, current_depth=depth_level - ) - elif depth_level <= 0: - link_info.is_fetchable = False - cls._link_fields[k] = link_info - - cls._check_hidden_fields() - - @staticmethod - def init_actions(cls): - """ - Init event-based actions - """ - ActionRegistry.clean_actions(cls) - for attr in dir(cls): - f = getattr(cls, attr) - if inspect.isfunction(f): - if hasattr(f, "has_action"): - ActionRegistry.add_action( - document_class=cls, - event_types=f.event_types, # type: ignore - action_direction=f.action_direction, # type: ignore - funct=f, - ) - - async def init_document_collection(self, cls): - """ - Init collection for the Document-based class - :param cls: - :return: - """ - cls.set_database(self.database) - - document_settings = cls.get_settings() - - # register in the Union Doc - - if document_settings.union_doc is not None: - name = cls.get_settings().name or cls.__name__ - document_settings.name = document_settings.union_doc.register_doc( - name, cls - ) - document_settings.union_doc_alias = name - - # set a name - - if not document_settings.name: - document_settings.name = cls.__name__ - - # check mongodb version fits - if ( - document_settings.timeseries is not None - and cls._database_major_version < 5 - ): - raise MongoDBVersionError( - "Timeseries are supported by MongoDB version 5 and higher" - ) - - # create pymongo collection - if ( - document_settings.timeseries is not None - and document_settings.name - not in await self.database.list_collection_names( - authorizedCollections=True, nameOnly=True - ) - ): - collection = await self.database.create_collection( - **document_settings.timeseries.build_query( - document_settings.name - ) - ) - else: - collection = self.database[document_settings.name] - - cls.set_collection(collection) - - async def init_indexes(self, cls, allow_index_dropping: bool = False): - """ - Async indexes initializer - """ - collection = cls.get_pymongo_collection() - document_settings = cls.get_settings() - - index_information = await collection.index_information() - - old_indexes = IndexModelField.from_pymongo_index_information( - index_information - ) - new_indexes = [] - - # 
Indexed field wrapped with Indexed() - indexed_fields = ( - (k, fvalue, get_index_attributes(fvalue)) - for k, fvalue in get_model_fields(cls).items() - ) - found_indexes = [ - IndexModelField( - IndexModel( - [ - ( - fvalue.alias or k, - indexed_attrs[0], - ) - ], - **indexed_attrs[1], - ) - ) - for k, fvalue, indexed_attrs in indexed_fields - if indexed_attrs is not None - ] - - if document_settings.merge_indexes: - result: List[IndexModelField] = [] - for subclass in reversed(cls.mro()): - if issubclass(subclass, Document) and not subclass == Document: - if ( - subclass not in self.inited_classes - and not subclass == cls - ): - await self.init_class(subclass) - if subclass.get_settings().indexes: - result = IndexModelField.merge_indexes( - result, subclass.get_settings().indexes - ) - found_indexes = IndexModelField.merge_indexes( - found_indexes, result - ) - - else: - if document_settings.indexes: - found_indexes = IndexModelField.merge_indexes( - found_indexes, document_settings.indexes - ) - - new_indexes += found_indexes - - # delete indexes - # Only drop indexes if the user specifically allows for it - if allow_index_dropping: - for index in IndexModelField.list_difference( - old_indexes, new_indexes - ): - await collection.drop_index(index.name) - - # create indices - if found_indexes: - new_indexes += await collection.create_indexes( - IndexModelField.list_to_index_model(new_indexes) - ) - - async def init_document(self, cls: Type[Document]) -> Optional[Output]: - """ - Init Document-based class - - :param cls: - :return: - """ - if cls is Document: - return None - - # get db version - build_info = await self.database.command({"buildInfo": 1}) - mongo_version = build_info["version"] - cls._database_major_version = int(mongo_version.split(".")[0]) - if cls not in self.inited_classes: - self.set_default_class_vars(cls) - self.init_settings(cls) - - bases = [b for b in cls.__bases__ if issubclass(b, Document)] - if len(bases) > 1: - return None - parent = bases[0] - output = await self.init_document(parent) - if cls.get_settings().is_root and ( - parent is Document or not parent.get_settings().is_root - ): - if cls.get_collection_name() is None: - cls.set_collection_name(cls.__name__) - output = Output( - class_name=cls.__name__, - collection_name=cls.get_collection_name(), - ) - cls._class_id = cls.__name__ - cls._inheritance_inited = True - elif output is not None: - output.class_name = f"{output.class_name}.{cls.__name__}" - cls._class_id = output.class_name - cls.set_collection_name(output.collection_name) - parent.add_child(cls._class_id, cls) - cls._parent = parent - cls._inheritance_inited = True - - await self.init_document_collection(cls) - if not self.skip_indexes: - await self.init_indexes(cls, self.allow_index_dropping) - self.init_document_fields(cls) - self.init_cache(cls) - self.init_actions(cls) - - self.inited_classes.append(cls) - - return output - - else: - if cls._inheritance_inited is True: - return Output( - class_name=cls._class_id, - collection_name=cls.get_collection_name(), - ) - else: - return None - - # Views - - def init_view_fields(self, cls) -> None: - """ - Init class fields - :return: None - """ - - if cls._link_fields is None: - cls._link_fields = {} - for k, v in get_model_fields(cls).items(): - path = v.alias or k - setattr(cls, k, ExpressionField(path)) - link_info = self.detect_link(v, k) - depth_level = cls.get_settings().max_nesting_depths_per_field.get( - k, None - ) - if depth_level is None: - depth_level = 
cls.get_settings().max_nesting_depth - if link_info is not None: - if depth_level > 0: - cls._link_fields[k] = link_info - self.check_nested_links( - link_info, current_depth=depth_level - ) - elif depth_level <= 0: - link_info.is_fetchable = False - cls._link_fields[k] = link_info - - def init_view_collection(self, cls): - """ - Init collection for View - - :param cls: - :return: - """ - view_settings = cls.get_settings() - - if view_settings.name is None: - view_settings.name = cls.__name__ - - if inspect.isclass(view_settings.source): - view_settings.source = view_settings.source.get_collection_name() - - view_settings.pymongo_db = self.database - view_settings.pymongo_collection = self.database[view_settings.name] - - async def init_view(self, cls: Type[View]): - """ - Init View-based class - - :param cls: - :return: - """ - self.init_settings(cls) - self.init_view_collection(cls) - self.init_view_fields(cls) - self.init_cache(cls) - - collection_names = await self.database.list_collection_names( - authorizedCollections=True, nameOnly=True - ) - if self.recreate_views or cls._settings.name not in collection_names: - if cls._settings.name in collection_names: - await cls.get_pymongo_collection().drop() - - await self.database.command( - { - "create": cls.get_settings().name, - "viewOn": cls.get_settings().source, - "pipeline": cls.get_settings().pipeline, - } - ) - - # Union Doc - - async def init_union_doc(self, cls: Type[UnionDoc]): - """ - Init Union Doc based class - - :param cls: - :return: - """ - self.init_settings(cls) - if cls._settings.name is None: - cls._settings.name = cls.__name__ - - cls._settings.pymongo_db = self.database - cls._settings.pymongo_collection = self.database[cls._settings.name] - cls._is_inited = True - - # Deprecations - - @staticmethod - def check_deprecations( - cls: Union[Type[Document], Type[View], Type[UnionDoc]], - ): - if hasattr(cls, "Collection"): - raise Deprecation( - "Collection inner class is not supported more. " - "Please use Settings instead. " - "https://beanie-odm.dev/tutorial/defining-a-document/#settings" - ) - - # Final - - async def init_class( - self, cls: Union[Type[Document], Type[View], Type[UnionDoc]] - ): - """ - Init Document, View or UnionDoc based class. - - :param cls: - :return: - """ - self.check_deprecations(cls) - - if issubclass(cls, Document): - await self.init_document(cls) - - if issubclass(cls, View): - await self.init_view(cls) - - if issubclass(cls, UnionDoc): - await self.init_union_doc(cls) - - if hasattr(cls, "custom_init"): - await cls.custom_init() # type: ignore - - -async def init_beanie( - database: Optional[AsyncDatabase] = None, - connection_string: Optional[str] = None, - document_models: Optional[ - Sequence[Union[Type[Document], Type[UnionDoc], Type["View"], str]] - ] = None, - allow_index_dropping: bool = False, - recreate_views: bool = False, - skip_indexes: bool = False, -): - """ - Beanie initialization - - :param database: Optional[AsyncDatabase] - pymongo database instance. Defaults to None. - :param connection_string: Optional[str] - MongoDB connection string. Defaults to None. - :param document_models: List[Union[Type[DocType], Type[UnionDocType], str]] - model classes - or strings with dot separated paths. Defaults to None. - :param allow_index_dropping: bool - if index dropping is allowed. - Defaults to False. - :param recreate_views: bool - if views should be recreated. Defaults to False. - :param skip_indexes: bool - if you want to skip working with the indexes. - Defaults to False. 
- :return: None - """ - - await Initializer( - database=database, - connection_string=connection_string, - document_models=document_models, - allow_index_dropping=allow_index_dropping, - recreate_views=recreate_views, - skip_indexes=skip_indexes, - ) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/parsing.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/parsing.py deleted file mode 100644 index 406da91d..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/parsing.py +++ /dev/null @@ -1,148 +0,0 @@ -from typing import TYPE_CHECKING, Any, Dict, Type, Union - -from pydantic import BaseModel - -from beanie.exceptions import ( - ApplyChangesException, - DocWasNotRegisteredInUnionClass, - UnionHasNoRegisteredDocs, -) -from beanie.odm.interfaces.detector import ModelType -from beanie.odm.utils.pydantic import get_config_value, parse_model - -if TYPE_CHECKING: - from beanie.odm.documents import Document - - -def merge_models(left: BaseModel, right: BaseModel) -> None: - """ - Merge two models - :param left: left model - :param right: right model - :return: None - """ - from beanie.odm.fields import Link - - for k, right_value in right.__iter__(): - left_value = getattr(left, k, None) - if isinstance(right_value, BaseModel) and isinstance( - left_value, BaseModel - ): - if get_config_value(left_value, "frozen"): - left.__setattr__(k, right_value) - else: - merge_models(left_value, right_value) - continue - if isinstance(right_value, list): - links_found = False - for i in right_value: - if isinstance(i, Link): - links_found = True - break - if links_found: - continue - left.__setattr__(k, right_value) - elif not isinstance(right_value, Link): - left.__setattr__(k, right_value) - - -def apply_changes( - changes: Dict[str, Any], target: Union[BaseModel, Dict[str, Any]] -): - for key, value in changes.items(): - if "." 
in key: - key_parts = key.split(".") - current_target = target - try: - for part in key_parts[:-1]: - if isinstance(current_target, dict): - current_target = current_target[part] - elif isinstance(current_target, BaseModel): - current_target = getattr(current_target, part) - else: - raise ApplyChangesException( - f"Unexpected type of target: {type(target)}" - ) - final_key = key_parts[-1] - if isinstance(current_target, dict): - current_target[final_key] = value - elif isinstance(current_target, BaseModel): - setattr(current_target, final_key, value) - else: - raise ApplyChangesException( - f"Unexpected type of target: {type(target)}" - ) - except (KeyError, AttributeError) as e: - raise ApplyChangesException( - f"Failed to apply change for key '{key}': {e}" - ) - else: - if isinstance(target, dict): - target[key] = value - elif isinstance(target, BaseModel): - setattr(target, key, value) - else: - raise ApplyChangesException( - f"Unexpected type of target: {type(target)}" - ) - - -def save_state(item: BaseModel): - if hasattr(item, "_save_state"): - item._save_state() # type: ignore - - -def parse_obj( - model: Union[Type[BaseModel], Type["Document"]], - data: Any, - lazy_parse: bool = False, -) -> BaseModel: - if ( - hasattr(model, "get_model_type") - and model.get_model_type() == ModelType.UnionDoc # type: ignore - ): - if model._document_models is None: # type: ignore - raise UnionHasNoRegisteredDocs - - if isinstance(data, dict): - class_name = data[model.get_settings().class_id] # type: ignore - else: - class_name = data._class_id - - if class_name not in model._document_models: # type: ignore - raise DocWasNotRegisteredInUnionClass - return parse_obj( - model=model._document_models[class_name], # type: ignore - data=data, - lazy_parse=lazy_parse, - ) # type: ignore - if ( - hasattr(model, "get_model_type") - and model.get_model_type() == ModelType.Document # type: ignore - and model._inheritance_inited # type: ignore - ): - if isinstance(data, dict): - class_name = data.get(model.get_settings().class_id) # type: ignore - elif hasattr(data, model.get_settings().class_id): # type: ignore - class_name = data._class_id - else: - class_name = None - - if model._children and class_name in model._children: # type: ignore - return parse_obj( - model=model._children[class_name], # type: ignore - data=data, - lazy_parse=lazy_parse, - ) # type: ignore - - if ( - lazy_parse - and hasattr(model, "get_model_type") - and model.get_model_type() == ModelType.Document # type: ignore - ): - o = model.lazy_parse(data, {"_id"}) # type: ignore - o._saved_state = {"_id": o.id} - return o - result = parse_model(model, data) - save_state(result) - return result diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/projection.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/projection.py deleted file mode 100644 index 3be3cc31..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/projection.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Dict, Optional, Type, TypeVar - -from pydantic import BaseModel - -from beanie.odm.interfaces.detector import ModelType -from beanie.odm.utils.pydantic import get_config_value, get_model_fields - -ProjectionModelType = TypeVar("ProjectionModelType", bound=BaseModel) - - -def get_projection( - model: Type[ProjectionModelType], -) -> Optional[Dict[str, int]]: - if hasattr(model, "get_model_type") and ( - model.get_model_type() == ModelType.UnionDoc # type: ignore - or ( # type: ignore - model.get_model_type() == ModelType.Document # type: 
ignore - and model._inheritance_inited # type: ignore - ) - ): # type: ignore - return None - - if hasattr(model, "Settings"): # MyPy checks - settings = getattr(model, "Settings") - - if hasattr(settings, "projection"): - return getattr(settings, "projection") - - if get_config_value(model, "extra") == "allow": - return None - - document_projection: Dict[str, int] = {} - - for name, field in get_model_fields(model).items(): - document_projection[field.alias or name] = 1 - return document_projection diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/pydantic.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/pydantic.py deleted file mode 100644 index 820b70bd..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/pydantic.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Any, Type - -import pydantic -from pydantic import BaseModel - -IS_PYDANTIC_V2 = int(pydantic.VERSION.split(".")[0]) >= 2 -IS_PYDANTIC_V2_10 = ( - IS_PYDANTIC_V2 and int(pydantic.VERSION.split(".")[1]) >= 10 -) - -if IS_PYDANTIC_V2: - from pydantic import TypeAdapter -else: - from pydantic import parse_obj_as - - -def parse_object_as(object_type: Type, data: Any): - if IS_PYDANTIC_V2: - return TypeAdapter(object_type).validate_python(data) - else: - return parse_obj_as(object_type, data) - - -def get_field_type(field): - if IS_PYDANTIC_V2: - return field.annotation - else: - return field.outer_type_ - - -def get_model_fields(model): - if IS_PYDANTIC_V2: - return model.model_fields - else: - return model.__fields__ - - -def parse_model(model_type: Type[BaseModel], data: Any): - if IS_PYDANTIC_V2: - return model_type.model_validate(data) - else: - return model_type.parse_obj(data) - - -def get_extra_field_info(field, parameter: str): - if IS_PYDANTIC_V2: - if isinstance(field.json_schema_extra, dict): - return field.json_schema_extra.get(parameter) - return None - else: - return field.field_info.extra.get(parameter) - - -def get_config_value(model, parameter: str): - if IS_PYDANTIC_V2: - return model.model_config.get(parameter) - else: - return getattr(model.Config, parameter, None) - - -def get_model_dump(model, *args: Any, **kwargs: Any): - if IS_PYDANTIC_V2: - return model.model_dump(*args, **kwargs) - else: - return model.dict(*args, **kwargs) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/relations.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/relations.py deleted file mode 100644 index 538706d9..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/relations.py +++ /dev/null @@ -1,50 +0,0 @@ -from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, Dict -from typing import Mapping as MappingType - -from beanie.odm.fields import ( - ExpressionField, -) - -# from pydantic.fields import ModelField -# from pydantic.typing import get_origin - -if TYPE_CHECKING: - from beanie import Document - - -def convert_ids( - query: MappingType[str, Any], doc: "Document", fetch_links: bool -) -> Dict[str, Any]: - # TODO add all the cases - new_query = {} - for k, v in query.items(): - k_splitted = k.split(".") - if ( - isinstance(k, ExpressionField) - and doc.get_link_fields() is not None - and len(k_splitted) == 2 - and k_splitted[0] in doc.get_link_fields().keys() # type: ignore - and k_splitted[1] == "id" - ): - if fetch_links: - new_k = f"{k_splitted[0]}._id" - else: - new_k = f"{k_splitted[0]}.$id" - else: - new_k = k - new_v: Any - if isinstance(v, Mapping): - new_v = convert_ids(v, doc, fetch_links) - elif isinstance(v, list): - new_v = [ 
- convert_ids(ele, doc, fetch_links) - if isinstance(ele, Mapping) - else ele - for ele in v - ] - else: - new_v = v - - new_query[new_k] = new_v - return new_query diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/self_validation.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/self_validation.py deleted file mode 100644 index 28462b68..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/self_validation.py +++ /dev/null @@ -1,21 +0,0 @@ -from functools import wraps -from typing import TYPE_CHECKING, TypeVar - -from typing_extensions import ParamSpec - -if TYPE_CHECKING: - from beanie.odm.documents import AsyncDocMethod, DocType - -P = ParamSpec("P") -R = TypeVar("R") - - -def validate_self_before( - f: "AsyncDocMethod[DocType, P, R]", -) -> "AsyncDocMethod[DocType, P, R]": - @wraps(f) - async def wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: - await self.validate_self(*args, **kwargs) - return await f(self, *args, **kwargs) - - return wrapper diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/state.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/state.py deleted file mode 100644 index 80c3ac9b..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/state.py +++ /dev/null @@ -1,93 +0,0 @@ -import inspect -from functools import wraps -from typing import TYPE_CHECKING, TypeVar - -from typing_extensions import ParamSpec - -from beanie.exceptions import StateManagementIsTurnedOff, StateNotSaved - -if TYPE_CHECKING: - from beanie.odm.documents import AnyDocMethod, AsyncDocMethod, DocType - -P = ParamSpec("P") -R = TypeVar("R") - - -def check_if_state_saved(self: "DocType"): - if not self.use_state_management(): - raise StateManagementIsTurnedOff( - "State management is turned off for this document" - ) - if self._saved_state is None: - raise StateNotSaved("No state was saved") - - -def saved_state_needed( - f: "AnyDocMethod[DocType, P, R]", -) -> "AnyDocMethod[DocType, P, R]": - @wraps(f) - def sync_wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: - check_if_state_saved(self) - return f(self, *args, **kwargs) - - @wraps(f) - async def async_wrapper( - self: "DocType", *args: P.args, **kwargs: P.kwargs - ) -> R: - check_if_state_saved(self) - # type ignore because there is no nice/proper way to annotate both sync - # and async case without parametrized TypeVar, which is not supported - return await f(self, *args, **kwargs) # type: ignore[misc] - - if inspect.iscoroutinefunction(f): - # type ignore because there is no nice/proper way to annotate both sync - # and async case without parametrized TypeVar, which is not supported - return async_wrapper # type: ignore[return-value] - return sync_wrapper - - -def check_if_previous_state_saved(self: "DocType"): - if not self.use_state_management(): - raise StateManagementIsTurnedOff( - "State management is turned off for this document" - ) - if not self.state_management_save_previous(): - raise StateManagementIsTurnedOff( - "State management's option to save previous state is turned off for this document" - ) - - -def previous_saved_state_needed( - f: "AnyDocMethod[DocType, P, R]", -) -> "AnyDocMethod[DocType, P, R]": - @wraps(f) - def sync_wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: - check_if_previous_state_saved(self) - return f(self, *args, **kwargs) - - @wraps(f) - async def async_wrapper( - self: "DocType", *args: P.args, **kwargs: P.kwargs - ) -> R: - check_if_previous_state_saved(self) - # type ignore because there is no 
nice/proper way to annotate both sync - # and async case without parametrized TypeVar, which is not supported - return await f(self, *args, **kwargs) # type: ignore[misc] - - if inspect.iscoroutinefunction(f): - # type ignore because there is no nice/proper way to annotate both sync - # and async case without parametrized TypeVar, which is not supported - return async_wrapper # type: ignore[return-value] - return sync_wrapper - - -def save_state_after( - f: "AsyncDocMethod[DocType, P, R]", -) -> "AsyncDocMethod[DocType, P, R]": - @wraps(f) - async def wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: - result = await f(self, *args, **kwargs) - self._save_state() - return result - - return wrapper diff --git a/venv/lib/python3.12/site-packages/beanie/odm/utils/typing.py b/venv/lib/python3.12/site-packages/beanie/odm/utils/typing.py deleted file mode 100644 index 958c730a..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/utils/typing.py +++ /dev/null @@ -1,74 +0,0 @@ -import inspect -import sys -from typing import Any, Dict, Optional, Tuple, Type - -from beanie.odm.fields import IndexedAnnotation - -from .pydantic import IS_PYDANTIC_V2, get_field_type - -if sys.version_info >= (3, 8): - from typing import get_args, get_origin -else: - from typing_extensions import get_args, get_origin - - -def extract_id_class(annotation) -> Type[Any]: - if get_origin(annotation) is not None: - try: - annotation = next( - arg for arg in get_args(annotation) if arg is not type(None) - ) - except StopIteration: - annotation = None - if inspect.isclass(annotation): - return annotation - raise ValueError("Unknown annotation: {}".format(annotation)) - - -def get_index_attributes(field) -> Optional[Tuple[int, Dict[str, Any]]]: - """Gets the index attributes from the field, if it is indexed. - - :param field: The field to get the index attributes from. - - :return: The index attributes, if the field is indexed. Otherwise, None. - """ - # For fields that are directly typed with `Indexed()`, the type will have - # an `_indexed` attribute. - field_type = get_field_type(field) - if hasattr(field_type, "_indexed"): - return getattr(field_type, "_indexed", None) - - # For fields that are use `Indexed` within `Annotated`, the field will have - # metadata that might contain an `IndexedAnnotation` instance. - if IS_PYDANTIC_V2: - # In Pydantic 2, the field has a `metadata` attribute with - # the annotations. - metadata = getattr(field, "metadata", None) - elif hasattr(field, "annotation") and hasattr( - field.annotation, "__metadata__" - ): - # In Pydantic 1, the field has an `annotation` attribute with the - # type assigned to the field. If the type is annotated, it will - # have a `__metadata__` attribute with the annotations. 
- metadata = field.annotation.__metadata__ - else: - return None - - if metadata is None: - return None - - try: - iter(metadata) - except TypeError: - return None - - indexed_annotation = next( - ( - annotation - for annotation in metadata - if isinstance(annotation, IndexedAnnotation) - ), - None, - ) - - return getattr(indexed_annotation, "_indexed", None) diff --git a/venv/lib/python3.12/site-packages/beanie/odm/views.py b/venv/lib/python3.12/site-packages/beanie/odm/views.py deleted file mode 100644 index a38985f6..00000000 --- a/venv/lib/python3.12/site-packages/beanie/odm/views.py +++ /dev/null @@ -1,71 +0,0 @@ -import asyncio -from typing import Any, ClassVar, Dict, Optional, Union - -from pydantic import BaseModel - -from beanie.exceptions import ViewWasNotInitialized -from beanie.odm.fields import Link, LinkInfo -from beanie.odm.interfaces.aggregate import AggregateInterface -from beanie.odm.interfaces.detector import DetectionInterface, ModelType -from beanie.odm.interfaces.find import FindInterface -from beanie.odm.interfaces.getters import OtherGettersInterface -from beanie.odm.settings.view import ViewSettings - - -class View( - BaseModel, - FindInterface, - AggregateInterface, - OtherGettersInterface, - DetectionInterface, -): - """ - What is needed: - - Source collection or view - pipeline - - """ - - # Relations - _link_fields: ClassVar[Optional[Dict[str, LinkInfo]]] = None - - # Settings - _settings: ClassVar[ViewSettings] - - @classmethod - def get_settings(cls) -> ViewSettings: - """ - Get view settings, which was created on - the initialization step - - :return: ViewSettings class - """ - if cls._settings is None: - raise ViewWasNotInitialized - return cls._settings - - async def fetch_link(self, field: Union[str, Any]): - ref_obj = getattr(self, field, None) - if isinstance(ref_obj, Link): - value = await ref_obj.fetch(fetch_links=True) - setattr(self, field, value) - if isinstance(ref_obj, list) and ref_obj: - values = await Link.fetch_list(ref_obj, fetch_links=True) - setattr(self, field, values) - - async def fetch_all_links(self): - coros = [] - link_fields = self.get_link_fields() - if link_fields is not None: - for ref in link_fields.values(): - coros.append(self.fetch_link(ref.field_name)) # TODO lists - await asyncio.gather(*coros) - - @classmethod - def get_link_fields(cls) -> Optional[Dict[str, LinkInfo]]: - return cls._link_fields - - @classmethod - def get_model_type(cls) -> ModelType: - return ModelType.View diff --git a/venv/lib/python3.12/site-packages/beanie/operators.py b/venv/lib/python3.12/site-packages/beanie/operators.py deleted file mode 100644 index aa339e0f..00000000 --- a/venv/lib/python3.12/site-packages/beanie/operators.py +++ /dev/null @@ -1,118 +0,0 @@ -from beanie.odm.operators.find.array import All, ElemMatch, Size -from beanie.odm.operators.find.bitwise import ( - BitsAllClear, - BitsAllSet, - BitsAnyClear, - BitsAnySet, -) -from beanie.odm.operators.find.comparison import ( - GT, - GTE, - LT, - LTE, - NE, - Eq, - In, - NotIn, -) -from beanie.odm.operators.find.element import Exists, Type -from beanie.odm.operators.find.evaluation import ( - Expr, - JsonSchema, - Mod, - RegEx, - Text, - Where, -) -from beanie.odm.operators.find.geospatial import ( - Box, - GeoIntersects, - GeoWithin, - GeoWithinTypes, - Near, - NearSphere, -) -from beanie.odm.operators.find.logical import And, Nor, Not, Or -from beanie.odm.operators.update.array import ( - AddToSet, - Pop, - Pull, - PullAll, - Push, -) -from beanie.odm.operators.update.bitwise import 
Bit -from beanie.odm.operators.update.general import ( - CurrentDate, - Inc, - Max, - Min, - Mul, - Rename, - Set, - SetOnInsert, - Unset, -) - -__all__ = [ - # Find - # Array - "All", - "ElemMatch", - "Size", - # Bitwise - "BitsAllClear", - "BitsAllSet", - "BitsAnyClear", - "BitsAnySet", - # Comparison - "Eq", - "GT", - "GTE", - "In", - "NotIn", - "LT", - "LTE", - "NE", - # Element - "Exists", - "Type", - "Type", - # Evaluation - "Expr", - "JsonSchema", - "Mod", - "RegEx", - "Text", - "Where", - # Geospatial - "GeoIntersects", - "GeoWithinTypes", - "GeoWithin", - "Box", - "Near", - "NearSphere", - # Logical - "Or", - "And", - "Nor", - "Not", - # Update - # Array - "AddToSet", - "Pop", - "Pull", - "Push", - "PullAll", - # Bitwise - "Bit", - # General - "Set", - "CurrentDate", - "Inc", - "Min", - "Max", - "Mul", - "Rename", - "SetOnInsert", - "Unset", -] diff --git a/venv/lib/python3.12/site-packages/beanie/py.typed b/venv/lib/python3.12/site-packages/beanie/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/bson/__init__.py b/venv/lib/python3.12/site-packages/bson/__init__.py deleted file mode 100644 index 790ac06e..00000000 --- a/venv/lib/python3.12/site-packages/bson/__init__.py +++ /dev/null @@ -1,1484 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""BSON (Binary JSON) encoding and decoding. - -The mapping from Python types to BSON types is as follows: - -======================================= ============= =================== -Python Type BSON Type Supported Direction -======================================= ============= =================== -None null both -bool boolean both -int [#int]_ int32 / int64 py -> bson -:class:`bson.int64.Int64` int64 both -float number (real) both -str string both -list array both -dict object both -:class:`~bson.son.SON` object both -:py:class:`~collections.abc.Mapping` object py -> bson -:class:`~bson.raw_bson.RawBSONDocument` object both [#raw]_ -datetime.datetime [#dt]_ [#dt2]_ UTC datetime both -:class:`~bson.datetime_ms.DatetimeMS` UTC datetime both [#dt3]_ -:class:`~bson.regex.Regex` regex both -compiled re [#re]_ regex py -> bson -:class:`~bson.binary.Binary` binary both -:py:class:`uuid.UUID` [#uuid]_ binary both -:class:`~bson.objectid.ObjectId` oid both -:class:`~bson.dbref.DBRef` dbref both -:class:`~bson.dbref.DBRef` dbpointer bson -> py -None undefined bson -> py -:class:`~bson.code.Code` code both -str symbol bson -> py -bytes [#bytes]_ binary both -:class:`~bson.timestamp.Timestamp` timestamp both -:class:`~bson.decimal128.Decimal128` decimal128 both -:class:`~bson.min_key.MinKey` min key both -:class:`~bson.max_key.MaxKey` max key both -======================================= ============= =================== - -.. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending - on its size. A BSON int32 will always decode to a Python int. A BSON - int64 will always decode to a :class:`~bson.int64.Int64`. -.. 
[#raw] Decoding a bson object to :class:`~bson.raw_bson.RawBSONDocument` can be - optionally configured via :attr:`~bson.codec_options.CodecOptions.document_class`. -.. [#dt] datetime.datetime instances are encoded with millisecond precision so - the microsecond field is truncated. -.. [#dt2] all datetime.datetime instances are encoded as UTC. By default, they - are decoded as *naive* but timezone aware datetimes are also supported. - See :doc:`/examples/datetimes` for examples. -.. [#dt3] To enable decoding a bson UTC datetime to a :class:`~bson.datetime_ms.DatetimeMS` - instance see :ref:`handling-out-of-range-datetimes`. -.. [#uuid] For :py:class:`uuid.UUID` encoding and decoding behavior see :doc:`/examples/uuid`. -.. [#re] :class:`~bson.regex.Regex` instances and regular expression - objects from ``re.compile()`` are both saved as BSON regular expressions. - BSON regular expressions are decoded as :class:`~bson.regex.Regex` - instances. -.. [#bytes] The bytes type is encoded as BSON binary with - subtype 0. It will be decoded back to bytes. -""" -from __future__ import annotations - -import datetime -import itertools -import os -import re -import struct -import sys -import uuid -from codecs import utf_8_decode as _utf_8_decode -from codecs import utf_8_encode as _utf_8_encode -from collections import abc as _abc -from typing import ( - IO, - TYPE_CHECKING, - Any, - BinaryIO, - Callable, - Generator, - Iterator, - Mapping, - MutableMapping, - NoReturn, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) - -from bson.binary import ( - ALL_UUID_SUBTYPES, - CSHARP_LEGACY, - JAVA_LEGACY, - OLD_UUID_SUBTYPE, - STANDARD, - UUID_SUBTYPE, - Binary, - UuidRepresentation, -) -from bson.code import Code -from bson.codec_options import ( - DEFAULT_CODEC_OPTIONS, - CodecOptions, - DatetimeConversion, - _raw_document_class, -) -from bson.datetime_ms import ( - EPOCH_AWARE, - EPOCH_NAIVE, - DatetimeMS, - _datetime_to_millis, - _millis_to_datetime, -) -from bson.dbref import DBRef -from bson.decimal128 import Decimal128 -from bson.errors import InvalidBSON, InvalidDocument, InvalidStringData -from bson.int64 import Int64 -from bson.max_key import MaxKey -from bson.min_key import MinKey -from bson.objectid import ObjectId -from bson.regex import Regex -from bson.son import RE_TYPE, SON -from bson.timestamp import Timestamp -from bson.tz_util import utc - -# Import some modules for type-checking only. 
-if TYPE_CHECKING: - from bson.raw_bson import RawBSONDocument - from bson.typings import _DocumentType, _ReadableBuffer - -try: - from bson import _cbson # type: ignore[attr-defined] - - _USE_C = True -except ImportError: - _USE_C = False - -__all__ = [ - "ALL_UUID_SUBTYPES", - "CSHARP_LEGACY", - "JAVA_LEGACY", - "OLD_UUID_SUBTYPE", - "STANDARD", - "UUID_SUBTYPE", - "Binary", - "UuidRepresentation", - "Code", - "DEFAULT_CODEC_OPTIONS", - "CodecOptions", - "DBRef", - "Decimal128", - "InvalidBSON", - "InvalidDocument", - "InvalidStringData", - "Int64", - "MaxKey", - "MinKey", - "ObjectId", - "Regex", - "RE_TYPE", - "SON", - "Timestamp", - "utc", - "EPOCH_AWARE", - "EPOCH_NAIVE", - "BSONNUM", - "BSONSTR", - "BSONOBJ", - "BSONARR", - "BSONBIN", - "BSONUND", - "BSONOID", - "BSONBOO", - "BSONDAT", - "BSONNUL", - "BSONRGX", - "BSONREF", - "BSONCOD", - "BSONSYM", - "BSONCWS", - "BSONINT", - "BSONTIM", - "BSONLON", - "BSONDEC", - "BSONMIN", - "BSONMAX", - "get_data_and_view", - "gen_list_name", - "encode", - "decode", - "decode_all", - "decode_iter", - "decode_file_iter", - "is_valid", - "BSON", - "has_c", - "DatetimeConversion", - "DatetimeMS", -] - -BSONNUM = b"\x01" # Floating point -BSONSTR = b"\x02" # UTF-8 string -BSONOBJ = b"\x03" # Embedded document -BSONARR = b"\x04" # Array -BSONBIN = b"\x05" # Binary -BSONUND = b"\x06" # Undefined -BSONOID = b"\x07" # ObjectId -BSONBOO = b"\x08" # Boolean -BSONDAT = b"\x09" # UTC Datetime -BSONNUL = b"\x0A" # Null -BSONRGX = b"\x0B" # Regex -BSONREF = b"\x0C" # DBRef -BSONCOD = b"\x0D" # Javascript code -BSONSYM = b"\x0E" # Symbol -BSONCWS = b"\x0F" # Javascript code with scope -BSONINT = b"\x10" # 32bit int -BSONTIM = b"\x11" # Timestamp -BSONLON = b"\x12" # 64bit int -BSONDEC = b"\x13" # Decimal128 -BSONMIN = b"\xFF" # Min key -BSONMAX = b"\x7F" # Max key - - -_UNPACK_FLOAT_FROM = struct.Struct(" Tuple[Any, memoryview]: - if isinstance(data, (bytes, bytearray)): - return data, memoryview(data) - view = memoryview(data) - return view.tobytes(), view - - -def _raise_unknown_type(element_type: int, element_name: str) -> NoReturn: - """Unknown type helper.""" - raise InvalidBSON( - "Detected unknown BSON type {!r} for fieldname '{}'. 
Are " - "you using the latest driver version?".format(chr(element_type).encode(), element_name) - ) - - -def _get_int( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[int, int]: - """Decode a BSON int32 to python int.""" - return _UNPACK_INT_FROM(data, position)[0], position + 4 - - -def _get_c_string(data: Any, view: Any, position: int, opts: CodecOptions[Any]) -> Tuple[str, int]: - """Decode a BSON 'C' string to python str.""" - end = data.index(b"\x00", position) - return _utf_8_decode(view[position:end], opts.unicode_decode_error_handler, True)[0], end + 1 - - -def _get_float( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[float, int]: - """Decode a BSON double to python float.""" - return _UNPACK_FLOAT_FROM(data, position)[0], position + 8 - - -def _get_string( - data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions[Any], dummy: Any -) -> Tuple[str, int]: - """Decode a BSON string to python str.""" - length = _UNPACK_INT_FROM(data, position)[0] - position += 4 - if length < 1 or obj_end - position < length: - raise InvalidBSON("invalid string length") - end = position + length - 1 - if data[end] != 0: - raise InvalidBSON("invalid end of string") - return _utf_8_decode(view[position:end], opts.unicode_decode_error_handler, True)[0], end + 1 - - -def _get_object_size(data: Any, position: int, obj_end: int) -> Tuple[int, int]: - """Validate and return a BSON document's size.""" - try: - obj_size = _UNPACK_INT_FROM(data, position)[0] - except struct.error as exc: - raise InvalidBSON(str(exc)) from None - end = position + obj_size - 1 - if end >= obj_end: - raise InvalidBSON("invalid object length") - if data[end] != 0: - raise InvalidBSON("bad eoo") - # If this is the top-level document, validate the total size too. - if position == 0 and obj_size != obj_end: - raise InvalidBSON("invalid object length") - return obj_size, end - - -def _get_object( - data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions[Any], dummy: Any -) -> Tuple[Any, int]: - """Decode a BSON subdocument to opts.document_class or bson.dbref.DBRef.""" - obj_size, end = _get_object_size(data, position, obj_end) - if _raw_document_class(opts.document_class): - return (opts.document_class(data[position : end + 1], opts), position + obj_size) - - obj = _elements_to_dict(data, view, position + 4, end, opts) - - position += obj_size - # If DBRef validation fails, return a normal doc. - if ( - isinstance(obj.get("$ref"), str) - and "$id" in obj - and isinstance(obj.get("$db"), (str, type(None))) - ): - return (DBRef(obj.pop("$ref"), obj.pop("$id", None), obj.pop("$db", None), obj), position) - return obj, position - - -def _get_array( - data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions[Any], element_name: str -) -> Tuple[Any, int]: - """Decode a BSON array to python list.""" - size = _UNPACK_INT_FROM(data, position)[0] - end = position + size - 1 - if data[end] != 0: - raise InvalidBSON("bad eoo") - - position += 4 - end -= 1 - result: list[Any] = [] - - # Avoid doing global and attribute lookups in the loop. - append = result.append - index = data.index - getter = _ELEMENT_GETTER - decoder_map = opts.type_registry._decoder_map - - while position < end: - element_type = data[position] - # Just skip the keys. 
- position = index(b"\x00", position) + 1 - try: - value, position = getter[element_type]( - data, view, position, obj_end, opts, element_name - ) - except KeyError: - _raise_unknown_type(element_type, element_name) - - if decoder_map: - custom_decoder = decoder_map.get(type(value)) - if custom_decoder is not None: - value = custom_decoder(value) - - append(value) - - if position != end + 1: - raise InvalidBSON("bad array length") - return result, position + 1 - - -def _get_binary( - data: Any, _view: Any, position: int, obj_end: int, opts: CodecOptions[Any], dummy1: Any -) -> Tuple[Union[Binary, uuid.UUID], int]: - """Decode a BSON binary to bson.binary.Binary or python UUID.""" - length, subtype = _UNPACK_LENGTH_SUBTYPE_FROM(data, position) - position += 5 - if subtype == 2: - length2 = _UNPACK_INT_FROM(data, position)[0] - position += 4 - if length2 != length - 4: - raise InvalidBSON("invalid binary (st 2) - lengths don't match!") - length = length2 - end = position + length - if length < 0 or end > obj_end: - raise InvalidBSON("bad binary object length") - - # Convert UUID subtypes to native UUIDs. - if subtype in ALL_UUID_SUBTYPES: - uuid_rep = opts.uuid_representation - binary_value = Binary(data[position:end], subtype) - if ( - (uuid_rep == UuidRepresentation.UNSPECIFIED) - or (subtype == UUID_SUBTYPE and uuid_rep != STANDARD) - or (subtype == OLD_UUID_SUBTYPE and uuid_rep == STANDARD) - ): - return binary_value, end - return binary_value.as_uuid(uuid_rep), end - - # Decode subtype 0 to 'bytes'. - if subtype == 0: - value = data[position:end] - else: - value = Binary(data[position:end], subtype) - - return value, end - - -def _get_oid( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[ObjectId, int]: - """Decode a BSON ObjectId to bson.objectid.ObjectId.""" - end = position + 12 - return ObjectId(data[position:end]), end - - -def _get_boolean( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[bool, int]: - """Decode a BSON true/false to python True/False.""" - end = position + 1 - boolean_byte = data[position:end] - if boolean_byte == b"\x00": - return False, end - elif boolean_byte == b"\x01": - return True, end - raise InvalidBSON("invalid boolean value: %r" % boolean_byte) - - -def _get_date( - data: Any, _view: Any, position: int, dummy0: int, opts: CodecOptions[Any], dummy1: Any -) -> Tuple[Union[datetime.datetime, DatetimeMS], int]: - """Decode a BSON datetime to python datetime.datetime.""" - return _millis_to_datetime(_UNPACK_LONG_FROM(data, position)[0], opts), position + 8 - - -def _get_code( - data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions[Any], element_name: str -) -> Tuple[Code, int]: - """Decode a BSON code to bson.code.Code.""" - code, position = _get_string(data, view, position, obj_end, opts, element_name) - return Code(code), position - - -def _get_code_w_scope( - data: Any, view: Any, position: int, _obj_end: int, opts: CodecOptions[Any], element_name: str -) -> Tuple[Code, int]: - """Decode a BSON code_w_scope to bson.code.Code.""" - code_end = position + _UNPACK_INT_FROM(data, position)[0] - code, position = _get_string(data, view, position + 4, code_end, opts, element_name) - scope, position = _get_object(data, view, position, code_end, opts, element_name) - if position != code_end: - raise InvalidBSON("scope outside of javascript code boundaries") - return Code(code, scope), position - - -def _get_regex( - data: Any, view: Any, position: int, dummy0: Any, 
opts: CodecOptions[Any], dummy1: Any -) -> Tuple[Regex[Any], int]: - """Decode a BSON regex to bson.regex.Regex or a python pattern object.""" - pattern, position = _get_c_string(data, view, position, opts) - bson_flags, position = _get_c_string(data, view, position, opts) - bson_re = Regex(pattern, bson_flags) - return bson_re, position - - -def _get_ref( - data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions[Any], element_name: str -) -> Tuple[DBRef, int]: - """Decode (deprecated) BSON DBPointer to bson.dbref.DBRef.""" - collection, position = _get_string(data, view, position, obj_end, opts, element_name) - oid, position = _get_oid(data, view, position, obj_end, opts, element_name) - return DBRef(collection, oid), position - - -def _get_timestamp( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[Timestamp, int]: - """Decode a BSON timestamp to bson.timestamp.Timestamp.""" - inc, timestamp = _UNPACK_TIMESTAMP_FROM(data, position) - return Timestamp(timestamp, inc), position + 8 - - -def _get_int64( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[Int64, int]: - """Decode a BSON int64 to bson.int64.Int64.""" - return Int64(_UNPACK_LONG_FROM(data, position)[0]), position + 8 - - -def _get_decimal128( - data: Any, _view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any -) -> Tuple[Decimal128, int]: - """Decode a BSON decimal128 to bson.decimal128.Decimal128.""" - end = position + 16 - return Decimal128.from_bid(data[position:end]), end - - -# Each decoder function's signature is: -# - data: bytes -# - view: memoryview that references `data` -# - position: int, beginning of object in 'data' to decode -# - obj_end: int, end of object to decode in 'data' if variable-length type -# - opts: a CodecOptions -_ELEMENT_GETTER: dict[int, Callable[..., Tuple[Any, int]]] = { - ord(BSONNUM): _get_float, - ord(BSONSTR): _get_string, - ord(BSONOBJ): _get_object, - ord(BSONARR): _get_array, - ord(BSONBIN): _get_binary, - ord(BSONUND): lambda u, v, w, x, y, z: (None, w), # noqa: ARG005 # Deprecated undefined - ord(BSONOID): _get_oid, - ord(BSONBOO): _get_boolean, - ord(BSONDAT): _get_date, - ord(BSONNUL): lambda u, v, w, x, y, z: (None, w), # noqa: ARG005 - ord(BSONRGX): _get_regex, - ord(BSONREF): _get_ref, # Deprecated DBPointer - ord(BSONCOD): _get_code, - ord(BSONSYM): _get_string, # Deprecated symbol - ord(BSONCWS): _get_code_w_scope, - ord(BSONINT): _get_int, - ord(BSONTIM): _get_timestamp, - ord(BSONLON): _get_int64, - ord(BSONDEC): _get_decimal128, - ord(BSONMIN): lambda u, v, w, x, y, z: (MinKey(), w), # noqa: ARG005 - ord(BSONMAX): lambda u, v, w, x, y, z: (MaxKey(), w), # noqa: ARG005 -} - - -if _USE_C: - - def _element_to_dict( - data: Any, - view: Any, # noqa: ARG001 - position: int, - obj_end: int, - opts: CodecOptions[Any], - raw_array: bool = False, - ) -> Tuple[str, Any, int]: - return cast( - "Tuple[str, Any, int]", - _cbson._element_to_dict(data, position, obj_end, opts, raw_array), - ) - -else: - - def _element_to_dict( - data: Any, - view: Any, - position: int, - obj_end: int, - opts: CodecOptions[Any], - raw_array: bool = False, - ) -> Tuple[str, Any, int]: - """Decode a single key, value pair.""" - element_type = data[position] - position += 1 - element_name, position = _get_c_string(data, view, position, opts) - if raw_array and element_type == ord(BSONARR): - _, end = _get_object_size(data, position, len(data)) - return element_name, view[position : end + 1], end + 1 - try: - 
value, position = _ELEMENT_GETTER[element_type]( - data, view, position, obj_end, opts, element_name - ) - except KeyError: - _raise_unknown_type(element_type, element_name) - - if opts.type_registry._decoder_map: - custom_decoder = opts.type_registry._decoder_map.get(type(value)) - if custom_decoder is not None: - value = custom_decoder(value) - - return element_name, value, position - - -_T = TypeVar("_T", bound=MutableMapping[str, Any]) - - -def _raw_to_dict( - data: Any, - position: int, - obj_end: int, - opts: CodecOptions[RawBSONDocument], - result: _T, - raw_array: bool = False, -) -> _T: - data, view = get_data_and_view(data) - return cast( - _T, _elements_to_dict(data, view, position, obj_end, opts, result, raw_array=raw_array) - ) - - -def _elements_to_dict( - data: Any, - view: Any, - position: int, - obj_end: int, - opts: CodecOptions[Any], - result: Any = None, - raw_array: bool = False, -) -> Any: - """Decode a BSON document into result.""" - if result is None: - result = opts.document_class() - end = obj_end - 1 - while position < end: - key, value, position = _element_to_dict( - data, view, position, obj_end, opts, raw_array=raw_array - ) - result[key] = value - if position != obj_end: - raise InvalidBSON("bad object or element length") - return result - - -def _bson_to_dict(data: Any, opts: CodecOptions[_DocumentType]) -> _DocumentType: - """Decode a BSON string to document_class.""" - data, view = get_data_and_view(data) - try: - if _raw_document_class(opts.document_class): - return opts.document_class(data, opts) # type:ignore[call-arg] - _, end = _get_object_size(data, 0, len(data)) - return cast("_DocumentType", _elements_to_dict(data, view, 4, end, opts)) - except InvalidBSON: - raise - except Exception: - # Change exception type to InvalidBSON but preserve traceback. - _, exc_value, exc_tb = sys.exc_info() - raise InvalidBSON(str(exc_value)).with_traceback(exc_tb) from None - - -if _USE_C: - _bson_to_dict = _cbson._bson_to_dict - - -_PACK_FLOAT = struct.Struct(" Generator[bytes, None, None]: - """Generate "keys" for encoded lists in the sequence - b"0\x00", b"1\x00", b"2\x00", ... - - The first 1000 keys are returned from a pre-built cache. All - subsequent keys are generated on the fly. 
- """ - yield from _LIST_NAMES - - counter = itertools.count(1000) - while True: - yield (str(next(counter)) + "\x00").encode("utf8") - - -def _make_c_string_check(string: Union[str, bytes]) -> bytes: - """Make a 'C' string, checking for embedded NUL characters.""" - if isinstance(string, bytes): - if b"\x00" in string: - raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character") - try: - _utf_8_decode(string, None, True) - return string + b"\x00" - except UnicodeError: - raise InvalidStringData( - "strings in documents must be valid UTF-8: %r" % string - ) from None - else: - if "\x00" in string: - raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character") - return _utf_8_encode(string)[0] + b"\x00" - - -def _make_c_string(string: Union[str, bytes]) -> bytes: - """Make a 'C' string.""" - if isinstance(string, bytes): - try: - _utf_8_decode(string, None, True) - return string + b"\x00" - except UnicodeError: - raise InvalidStringData( - "strings in documents must be valid UTF-8: %r" % string - ) from None - else: - return _utf_8_encode(string)[0] + b"\x00" - - -def _make_name(string: str) -> bytes: - """Make a 'C' string suitable for a BSON key.""" - if "\x00" in string: - raise InvalidDocument("BSON keys must not contain a NUL character") - return _utf_8_encode(string)[0] + b"\x00" - - -def _encode_float(name: bytes, value: float, dummy0: Any, dummy1: Any) -> bytes: - """Encode a float.""" - return b"\x01" + name + _PACK_FLOAT(value) - - -def _encode_bytes(name: bytes, value: bytes, dummy0: Any, dummy1: Any) -> bytes: - """Encode a python bytes.""" - # Python3 special case. Store 'bytes' as BSON binary subtype 0. - return b"\x05" + name + _PACK_INT(len(value)) + b"\x00" + value - - -def _encode_mapping(name: bytes, value: Any, check_keys: bool, opts: CodecOptions[Any]) -> bytes: - """Encode a mapping type.""" - if _raw_document_class(value): - return b"\x03" + name + cast(bytes, value.raw) - data = b"".join([_element_to_bson(key, val, check_keys, opts) for key, val in value.items()]) - return b"\x03" + name + _PACK_INT(len(data) + 5) + data + b"\x00" - - -def _encode_dbref(name: bytes, value: DBRef, check_keys: bool, opts: CodecOptions[Any]) -> bytes: - """Encode bson.dbref.DBRef.""" - buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00") - begin = len(buf) - 4 - - buf += _name_value_to_bson(b"$ref\x00", value.collection, check_keys, opts) - buf += _name_value_to_bson(b"$id\x00", value.id, check_keys, opts) - if value.database is not None: - buf += _name_value_to_bson(b"$db\x00", value.database, check_keys, opts) - for key, val in value._DBRef__kwargs.items(): - buf += _element_to_bson(key, val, check_keys, opts) - - buf += b"\x00" - buf[begin : begin + 4] = _PACK_INT(len(buf) - begin) - return bytes(buf) - - -def _encode_list( - name: bytes, value: Sequence[Any], check_keys: bool, opts: CodecOptions[Any] -) -> bytes: - """Encode a list/tuple.""" - lname = gen_list_name() - data = b"".join([_name_value_to_bson(next(lname), item, check_keys, opts) for item in value]) - return b"\x04" + name + _PACK_INT(len(data) + 5) + data + b"\x00" - - -def _encode_text(name: bytes, value: str, dummy0: Any, dummy1: Any) -> bytes: - """Encode a python str.""" - bvalue = _utf_8_encode(value)[0] - return b"\x02" + name + _PACK_INT(len(bvalue) + 1) + bvalue + b"\x00" - - -def _encode_binary(name: bytes, value: Binary, dummy0: Any, dummy1: Any) -> bytes: - """Encode bson.binary.Binary.""" - subtype = value.subtype - if subtype == 2: - value = 
_PACK_INT(len(value)) + value # type: ignore - return b"\x05" + name + _PACK_LENGTH_SUBTYPE(len(value), subtype) + value - - -def _encode_uuid(name: bytes, value: uuid.UUID, dummy: Any, opts: CodecOptions[Any]) -> bytes: - """Encode uuid.UUID.""" - uuid_representation = opts.uuid_representation - binval = Binary.from_uuid(value, uuid_representation=uuid_representation) - return _encode_binary(name, binval, dummy, opts) - - -def _encode_objectid(name: bytes, value: ObjectId, dummy: Any, dummy1: Any) -> bytes: - """Encode bson.objectid.ObjectId.""" - return b"\x07" + name + value.binary - - -def _encode_bool(name: bytes, value: bool, dummy0: Any, dummy1: Any) -> bytes: - """Encode a python boolean (True/False).""" - return b"\x08" + name + (value and b"\x01" or b"\x00") - - -def _encode_datetime(name: bytes, value: datetime.datetime, dummy0: Any, dummy1: Any) -> bytes: - """Encode datetime.datetime.""" - millis = _datetime_to_millis(value) - return b"\x09" + name + _PACK_LONG(millis) - - -def _encode_datetime_ms(name: bytes, value: DatetimeMS, dummy0: Any, dummy1: Any) -> bytes: - """Encode datetime.datetime.""" - millis = int(value) - return b"\x09" + name + _PACK_LONG(millis) - - -def _encode_none(name: bytes, dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: - """Encode python None.""" - return b"\x0A" + name - - -def _encode_regex(name: bytes, value: Regex[Any], dummy0: Any, dummy1: Any) -> bytes: - """Encode a python regex or bson.regex.Regex.""" - flags = value.flags - # Python 3 common case - if flags == re.UNICODE: - return b"\x0B" + name + _make_c_string_check(value.pattern) + b"u\x00" - elif flags == 0: - return b"\x0B" + name + _make_c_string_check(value.pattern) + b"\x00" - else: - sflags = b"" - if flags & re.IGNORECASE: - sflags += b"i" - if flags & re.LOCALE: - sflags += b"l" - if flags & re.MULTILINE: - sflags += b"m" - if flags & re.DOTALL: - sflags += b"s" - if flags & re.UNICODE: - sflags += b"u" - if flags & re.VERBOSE: - sflags += b"x" - sflags += b"\x00" - return b"\x0B" + name + _make_c_string_check(value.pattern) + sflags - - -def _encode_code(name: bytes, value: Code, dummy: Any, opts: CodecOptions[Any]) -> bytes: - """Encode bson.code.Code.""" - cstring = _make_c_string(value) - cstrlen = len(cstring) - if value.scope is None: - return b"\x0D" + name + _PACK_INT(cstrlen) + cstring - scope = _dict_to_bson(value.scope, False, opts, False) - full_length = _PACK_INT(8 + cstrlen + len(scope)) - return b"\x0F" + name + full_length + _PACK_INT(cstrlen) + cstring + scope - - -def _encode_int(name: bytes, value: int, dummy0: Any, dummy1: Any) -> bytes: - """Encode a python int.""" - if -2147483648 <= value <= 2147483647: - return b"\x10" + name + _PACK_INT(value) - else: - try: - return b"\x12" + name + _PACK_LONG(value) - except struct.error: - raise OverflowError("BSON can only handle up to 8-byte ints") from None - - -def _encode_timestamp(name: bytes, value: Any, dummy0: Any, dummy1: Any) -> bytes: - """Encode bson.timestamp.Timestamp.""" - return b"\x11" + name + _PACK_TIMESTAMP(value.inc, value.time) - - -def _encode_long(name: bytes, value: Any, dummy0: Any, dummy1: Any) -> bytes: - """Encode a bson.int64.Int64.""" - try: - return b"\x12" + name + _PACK_LONG(value) - except struct.error: - raise OverflowError("BSON can only handle up to 8-byte ints") from None - - -def _encode_decimal128(name: bytes, value: Decimal128, dummy0: Any, dummy1: Any) -> bytes: - """Encode bson.decimal128.Decimal128.""" - return b"\x13" + name + value.bid - - -def _encode_minkey(name: bytes, 
dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: - """Encode bson.min_key.MinKey.""" - return b"\xFF" + name - - -def _encode_maxkey(name: bytes, dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: - """Encode bson.max_key.MaxKey.""" - return b"\x7F" + name - - -# Each encoder function's signature is: -# - name: utf-8 bytes -# - value: a Python data type, e.g. a Python int for _encode_int -# - check_keys: bool, whether to check for invalid names -# - opts: a CodecOptions -_ENCODERS = { - bool: _encode_bool, - bytes: _encode_bytes, - datetime.datetime: _encode_datetime, - DatetimeMS: _encode_datetime_ms, - dict: _encode_mapping, - float: _encode_float, - int: _encode_int, - list: _encode_list, - str: _encode_text, - tuple: _encode_list, - type(None): _encode_none, - uuid.UUID: _encode_uuid, - Binary: _encode_binary, - Int64: _encode_long, - Code: _encode_code, - DBRef: _encode_dbref, - MaxKey: _encode_maxkey, - MinKey: _encode_minkey, - ObjectId: _encode_objectid, - Regex: _encode_regex, - RE_TYPE: _encode_regex, - SON: _encode_mapping, - Timestamp: _encode_timestamp, - Decimal128: _encode_decimal128, - # Special case. This will never be looked up directly. - _abc.Mapping: _encode_mapping, -} - -# Map each _type_marker to its encoder for faster lookup. -_MARKERS = {} -for _typ in _ENCODERS: - if hasattr(_typ, "_type_marker"): - _MARKERS[_typ._type_marker] = _ENCODERS[_typ] - - -_BUILT_IN_TYPES = tuple(t for t in _ENCODERS) - - -def _name_value_to_bson( - name: bytes, - value: Any, - check_keys: bool, - opts: CodecOptions[Any], - in_custom_call: bool = False, - in_fallback_call: bool = False, -) -> bytes: - """Encode a single name, value pair.""" - - was_integer_overflow = False - - # First see if the type is already cached. KeyError will only ever - # happen once per subtype. - try: - return _ENCODERS[type(value)](name, value, check_keys, opts) # type: ignore - except KeyError: - pass - except OverflowError: - if not isinstance(value, int): - raise - - # Give the fallback_encoder a chance - was_integer_overflow = True - - # Second, fall back to trying _type_marker. This has to be done - # before the loop below since users could subclass one of our - # custom types that subclasses a python built-in (e.g. Binary) - marker = getattr(value, "_type_marker", None) - if isinstance(marker, int) and marker in _MARKERS: - func = _MARKERS[marker] - # Cache this type for faster subsequent lookup. - _ENCODERS[type(value)] = func - return func(name, value, check_keys, opts) # type: ignore - - # Third, check if a type encoder is registered for this type. - # Note that subtypes of registered custom types are not auto-encoded. - if not in_custom_call and opts.type_registry._encoder_map: - custom_encoder = opts.type_registry._encoder_map.get(type(value)) - if custom_encoder is not None: - return _name_value_to_bson( - name, custom_encoder(value), check_keys, opts, in_custom_call=True - ) - - # Fourth, test each base type. This will only happen once for - # a subtype of a supported base type. Unlike in the C-extensions, this - # is done after trying the custom type encoder because checking for each - # subtype is expensive. - for base in _BUILT_IN_TYPES: - if not was_integer_overflow and isinstance(value, base): - func = _ENCODERS[base] - # Cache this type for faster subsequent lookup. - _ENCODERS[type(value)] = func - return func(name, value, check_keys, opts) # type: ignore - - # As a last resort, try using the fallback encoder, if the user has - # provided one. 
- fallback_encoder = opts.type_registry._fallback_encoder - if not in_fallback_call and fallback_encoder is not None: - return _name_value_to_bson( - name, fallback_encoder(value), check_keys, opts, in_fallback_call=True - ) - - if was_integer_overflow: - raise OverflowError("BSON can only handle up to 8-byte ints") - raise InvalidDocument(f"cannot encode object: {value!r}, of type: {type(value)!r}") - - -def _element_to_bson(key: Any, value: Any, check_keys: bool, opts: CodecOptions[Any]) -> bytes: - """Encode a single key, value pair.""" - if not isinstance(key, str): - raise InvalidDocument(f"documents must have only string keys, key was {key!r}") - if check_keys: - if key.startswith("$"): - raise InvalidDocument(f"key {key!r} must not start with '$'") - if "." in key: - raise InvalidDocument(f"key {key!r} must not contain '.'") - - name = _make_name(key) - return _name_value_to_bson(name, value, check_keys, opts) - - -def _dict_to_bson( - doc: Any, check_keys: bool, opts: CodecOptions[Any], top_level: bool = True -) -> bytes: - """Encode a document to BSON.""" - if _raw_document_class(doc): - return cast(bytes, doc.raw) - try: - elements = [] - if top_level and "_id" in doc: - elements.append(_name_value_to_bson(b"_id\x00", doc["_id"], check_keys, opts)) - for key, value in doc.items(): - if not top_level or key != "_id": - try: - elements.append(_element_to_bson(key, value, check_keys, opts)) - except InvalidDocument as err: - raise InvalidDocument(f"Invalid document {doc} | {err}") from err - except AttributeError: - raise TypeError(f"encoder expected a mapping type but got: {doc!r}") from None - - encoded = b"".join(elements) - return _PACK_INT(len(encoded) + 5) + encoded + b"\x00" - - -if _USE_C: - _dict_to_bson = _cbson._dict_to_bson - - -_CODEC_OPTIONS_TYPE_ERROR = TypeError("codec_options must be an instance of CodecOptions") - - -def encode( - document: Mapping[str, Any], - check_keys: bool = False, - codec_options: CodecOptions[Any] = DEFAULT_CODEC_OPTIONS, -) -> bytes: - """Encode a document to BSON. - - A document can be any mapping type (like :class:`dict`). - - Raises :class:`TypeError` if `document` is not a mapping type, - or contains keys that are not instances of :class:`str`. Raises - :class:`~bson.errors.InvalidDocument` if `document` cannot be - converted to :class:`BSON`. - - :param document: mapping type representing a document - :param check_keys: check if keys start with '$' or - contain '.', raising :class:`~bson.errors.InvalidDocument` in - either case - :param codec_options: An instance of - :class:`~bson.codec_options.CodecOptions`. - - .. versionadded:: 3.9 - """ - if not isinstance(codec_options, CodecOptions): - raise _CODEC_OPTIONS_TYPE_ERROR - - return _dict_to_bson(document, check_keys, codec_options) - - -@overload -def decode(data: _ReadableBuffer, codec_options: None = None) -> dict[str, Any]: - ... - - -@overload -def decode(data: _ReadableBuffer, codec_options: CodecOptions[_DocumentType]) -> _DocumentType: - ... - - -def decode( - data: _ReadableBuffer, codec_options: Optional[CodecOptions[_DocumentType]] = None -) -> Union[dict[str, Any], _DocumentType]: - """Decode BSON to a document. - - By default, returns a BSON document represented as a Python - :class:`dict`. To use a different :class:`MutableMapping` class, - configure a :class:`~bson.codec_options.CodecOptions`:: - - >>> import collections # From Python standard library. 
- >>> import bson
- >>> from bson.codec_options import CodecOptions
- >>> data = bson.encode({'a': 1})
- >>> decoded_doc = bson.decode(data)
- 
- >>> options = CodecOptions(document_class=collections.OrderedDict)
- >>> decoded_doc = bson.decode(data, codec_options=options)
- >>> type(decoded_doc)
- <class 'collections.OrderedDict'>
- 
- :param data: the BSON to decode. Any bytes-like object that implements
- the buffer protocol.
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionadded:: 3.9
- """
- opts: CodecOptions[Any] = codec_options or DEFAULT_CODEC_OPTIONS
- if not isinstance(opts, CodecOptions):
- raise _CODEC_OPTIONS_TYPE_ERROR
- 
- return cast("Union[dict[str, Any], _DocumentType]", _bson_to_dict(data, opts))
- 
- 
- def _decode_all(data: _ReadableBuffer, opts: CodecOptions[_DocumentType]) -> list[_DocumentType]:
- """Decode BSON data to multiple documents."""
- data, view = get_data_and_view(data)
- data_len = len(data)
- docs: list[_DocumentType] = []
- position = 0
- end = data_len - 1
- use_raw = _raw_document_class(opts.document_class)
- try:
- while position < end:
- obj_size = _UNPACK_INT_FROM(data, position)[0]
- if data_len - position < obj_size:
- raise InvalidBSON("invalid object size")
- obj_end = position + obj_size - 1
- if data[obj_end] != 0:
- raise InvalidBSON("bad eoo")
- if use_raw:
- docs.append(opts.document_class(data[position : obj_end + 1], opts))  # type: ignore
- else:
- docs.append(_elements_to_dict(data, view, position + 4, obj_end, opts))
- position += obj_size
- return docs
- except InvalidBSON:
- raise
- except Exception:
- # Change exception type to InvalidBSON but preserve traceback.
- _, exc_value, exc_tb = sys.exc_info()
- raise InvalidBSON(str(exc_value)).with_traceback(exc_tb) from None
- 
- 
- if _USE_C:
- _decode_all = _cbson._decode_all
- 
- 
- @overload
- def decode_all(data: _ReadableBuffer, codec_options: None = None) -> list[dict[str, Any]]:
- ...
- 
- 
- @overload
- def decode_all(
- data: _ReadableBuffer, codec_options: CodecOptions[_DocumentType]
- ) -> list[_DocumentType]:
- ...
- 
- 
- def decode_all(
- data: _ReadableBuffer, codec_options: Optional[CodecOptions[_DocumentType]] = None
- ) -> Union[list[dict[str, Any]], list[_DocumentType]]:
- """Decode BSON data to multiple documents.
- 
- `data` must be a bytes-like object implementing the buffer protocol that
- provides concatenated, valid, BSON-encoded documents.
- 
- :param data: BSON data
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionchanged:: 3.9
- Supports bytes-like objects that implement the buffer protocol.
- 
- .. versionchanged:: 3.0
- Removed `compile_re` option: PyMongo now always represents BSON regular
- expressions as :class:`~bson.regex.Regex` objects. Use
- :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
- BSON regular expression to a Python regular expression object.
- 
- Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
- `codec_options`.
- """
- if codec_options is None:
- return _decode_all(data, DEFAULT_CODEC_OPTIONS)
- 
- if not isinstance(codec_options, CodecOptions):
- raise _CODEC_OPTIONS_TYPE_ERROR
- 
- return _decode_all(data, codec_options)
- 
- 
- def _decode_selective(
- rawdoc: Any, fields: Any, codec_options: CodecOptions[_DocumentType]
- ) -> _DocumentType:
- if _raw_document_class(codec_options.document_class):
- # If document_class is RawBSONDocument, use vanilla dictionary for
- # decoding command response.
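A round-trip sketch for the encode()/decode()/decode_all() trio documented above (the sample documents are illustrative):

import bson
from bson.codec_options import CodecOptions
from bson.son import SON

raw = bson.encode({"a": 1}) + bson.encode({"b": 2})
docs = bson.decode_all(raw)  # [{'a': 1}, {'b': 2}]

opts = CodecOptions(document_class=SON)
doc = bson.decode(bson.encode({"a": 1}), codec_options=opts)  # SON([('a', 1)])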
- doc: _DocumentType = {} # type:ignore[assignment] - else: - # Else, use the specified document_class. - doc = codec_options.document_class() - for key, value in rawdoc.items(): - if key in fields: - if fields[key] == 1: - doc[key] = _bson_to_dict(rawdoc.raw, codec_options)[key] # type:ignore[index] - else: - doc[key] = _decode_selective( # type:ignore[index] - value, fields[key], codec_options - ) - else: - doc[key] = value # type:ignore[index] - return doc - - -def _array_of_documents_to_buffer(data: Union[memoryview, bytes]) -> bytes: - # Extract the raw bytes of each document. - position = 0 - view = memoryview(data) - _, end = _get_object_size(view, position, len(view)) - position += 4 - buffers: list[memoryview] = [] - append = buffers.append - while position < end - 1: - # Just skip the keys. - while view[position] != 0: - position += 1 - position += 1 - obj_size, _ = _get_object_size(view, position, end) - append(view[position : position + obj_size]) - position += obj_size - if position != end: - raise InvalidBSON("bad object or element length") - return b"".join(buffers) - - -if _USE_C: - _array_of_documents_to_buffer = _cbson._array_of_documents_to_buffer - - -def _convert_raw_document_lists_to_streams(document: Any) -> None: - """Convert raw array of documents to a stream of BSON documents.""" - cursor = document.get("cursor") - if not cursor: - return - for key in ("firstBatch", "nextBatch"): - batch = cursor.get(key) - if not batch: - continue - data = _array_of_documents_to_buffer(batch) - if data: - cursor[key] = [data] - else: - cursor[key] = [] - - -def _decode_all_selective( - data: Any, codec_options: CodecOptions[_DocumentType], fields: Any -) -> list[_DocumentType]: - """Decode BSON data to a single document while using user-provided - custom decoding logic. - - `data` must be a string representing a valid, BSON-encoded document. - - :param data: BSON data - :param codec_options: An instance of - :class:`~bson.codec_options.CodecOptions` with user-specified type - decoders. If no decoders are found, this method is the same as - ``decode_all``. - :param fields: Map of document namespaces where data that needs - to be custom decoded lives or None. For example, to custom decode a - list of objects in 'field1.subfield1', the specified value should be - ``{'field1': {'subfield1': 1}}``. If ``fields`` is an empty map or - None, this method is the same as ``decode_all``. - - :return: Single-member list containing the decoded document. - - .. versionadded:: 3.8 - """ - if not codec_options.type_registry._decoder_map: - return decode_all(data, codec_options) - - if not fields: - return decode_all(data, codec_options.with_options(type_registry=None)) - - # Decode documents for internal use. - from bson.raw_bson import RawBSONDocument - - internal_codec_options: CodecOptions[RawBSONDocument] = codec_options.with_options( - document_class=RawBSONDocument, type_registry=None - ) - _doc = _bson_to_dict(data, internal_codec_options) - return [ - _decode_selective( - _doc, - fields, - codec_options, - ) - ] - - -@overload -def decode_iter(data: bytes, codec_options: None = None) -> Iterator[dict[str, Any]]: - ... - - -@overload -def decode_iter(data: bytes, codec_options: CodecOptions[_DocumentType]) -> Iterator[_DocumentType]: - ... - - -def decode_iter( - data: bytes, codec_options: Optional[CodecOptions[_DocumentType]] = None -) -> Union[Iterator[dict[str, Any]], Iterator[_DocumentType]]: - """Decode BSON data to multiple documents as a generator. 
- 
- Works similarly to the decode_all function, but yields one document at a
- time.
- 
- `data` must be a bytes object of concatenated, valid, BSON-encoded
- documents.
- 
- :param data: BSON data
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionchanged:: 3.0
- Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
- `codec_options`.
- 
- .. versionadded:: 2.8
- """
- opts = codec_options or DEFAULT_CODEC_OPTIONS
- if not isinstance(opts, CodecOptions):
- raise _CODEC_OPTIONS_TYPE_ERROR
- 
- position = 0
- end = len(data) - 1
- while position < end:
- obj_size = _UNPACK_INT_FROM(data, position)[0]
- elements = data[position : position + obj_size]
- position += obj_size
- 
- yield _bson_to_dict(elements, opts)  # type:ignore[misc]
- 
- 
- @overload
- def decode_file_iter(
- file_obj: Union[BinaryIO, IO[bytes]], codec_options: None = None
- ) -> Iterator[dict[str, Any]]:
- ...
- 
- 
- @overload
- def decode_file_iter(
- file_obj: Union[BinaryIO, IO[bytes]], codec_options: CodecOptions[_DocumentType]
- ) -> Iterator[_DocumentType]:
- ...
- 
- 
- def decode_file_iter(
- file_obj: Union[BinaryIO, IO[bytes]],
- codec_options: Optional[CodecOptions[_DocumentType]] = None,
- ) -> Union[Iterator[dict[str, Any]], Iterator[_DocumentType]]:
- """Decode BSON data from a file to multiple documents as a generator.
- 
- Works similarly to the decode_all function, but reads from the file object
- in chunks and parses BSON in chunks, yielding one document at a time.
- 
- :param file_obj: A file object containing BSON data.
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionchanged:: 3.0
- Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
- `codec_options`.
- 
- .. versionadded:: 2.8
- """
- opts = codec_options or DEFAULT_CODEC_OPTIONS
- while True:
- # Read size of next object.
- size_data: Any = file_obj.read(4)
- if not size_data:
- break  # Finished with file normally.
- elif len(size_data) != 4:
- raise InvalidBSON("cut off in middle of objsize")
- obj_size = _UNPACK_INT_FROM(size_data, 0)[0] - 4
- elements = size_data + file_obj.read(max(0, obj_size))
- yield _bson_to_dict(elements, opts)  # type:ignore[arg-type, misc]
- 
- 
- def is_valid(bson: bytes) -> bool:
- """Check that the given bytes represent valid :class:`BSON` data.
- 
- Raises :class:`TypeError` if `bson` is not an instance of
- :class:`bytes`. Returns ``True``
- if `bson` is valid :class:`BSON`, ``False`` otherwise.
- 
- :param bson: the data to be validated
- """
- if not isinstance(bson, bytes):
- raise TypeError(f"BSON data must be an instance of a subclass of bytes, not {type(bson)}")
- 
- try:
- _bson_to_dict(bson, DEFAULT_CODEC_OPTIONS)
- return True
- except Exception:
- return False
- 
- 
- class BSON(bytes):
- """BSON (Binary JSON) data.
- 
- .. warning:: Using this class to encode and decode BSON adds a performance
- cost. For better performance use the module level functions
- :func:`encode` and :func:`decode` instead.
- """
- 
- @classmethod
- def encode(
- cls: Type[BSON],
- document: Mapping[str, Any],
- check_keys: bool = False,
- codec_options: CodecOptions[Any] = DEFAULT_CODEC_OPTIONS,
- ) -> BSON:
- """Encode a document to a new :class:`BSON` instance.
- 
- A document can be any mapping type (like :class:`dict`).
- 
- Raises :class:`TypeError` if `document` is not a mapping type,
- or contains keys that are not instances of
- :class:`str`. Raises :class:`~bson.errors.InvalidDocument`
- if `document` cannot be converted to :class:`BSON`.
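A usage sketch for the streaming decoder above; the file name is hypothetical (e.g. the output of mongodump):

import bson

with open("dump.bson", "rb") as f:  # hypothetical dump of concatenated BSON docs
    for doc in bson.decode_file_iter(f):
        print(doc)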
- 
- :param document: mapping type representing a document
- :param check_keys: check if keys start with '$' or
- contain '.', raising :class:`~bson.errors.InvalidDocument` in
- either case
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionchanged:: 3.0
- Replaced `uuid_subtype` option with `codec_options`.
- """
- return cls(encode(document, check_keys, codec_options))
- 
- def decode(  # type:ignore[override]
- self, codec_options: CodecOptions[Any] = DEFAULT_CODEC_OPTIONS
- ) -> dict[str, Any]:
- """Decode this BSON data.
- 
- By default, returns a BSON document represented as a Python
- :class:`dict`. To use a different :class:`MutableMapping` class,
- configure a :class:`~bson.codec_options.CodecOptions`::
- 
- >>> import collections # From Python standard library.
- >>> import bson
- >>> from bson.codec_options import CodecOptions
- >>> data = bson.BSON.encode({'a': 1})
- >>> decoded_doc = bson.BSON(data).decode()
- 
- >>> options = CodecOptions(document_class=collections.OrderedDict)
- >>> decoded_doc = bson.BSON(data).decode(codec_options=options)
- >>> type(decoded_doc)
- <class 'collections.OrderedDict'>
- 
- :param codec_options: An instance of
- :class:`~bson.codec_options.CodecOptions`.
- 
- .. versionchanged:: 3.0
- Removed `compile_re` option: PyMongo now always represents BSON
- regular expressions as :class:`~bson.regex.Regex` objects. Use
- :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
- BSON regular expression to a Python regular expression object.
- 
- Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
- `codec_options`.
- """
- return decode(self, codec_options)
- 
- 
- def has_c() -> bool:
- """Is the C extension installed?"""
- return _USE_C
- 
- 
- def _after_fork() -> None:
- """Release the ObjectId increment lock in the child process after a fork."""
- if ObjectId._inc_lock.locked():
- ObjectId._inc_lock.release()
- 
- 
- if hasattr(os, "register_at_fork"):
- # This handler runs in the child, in the same thread that called fork.
- # Forking from within the critical region on that thread would break it,
- # which is fine: we never call fork directly from a critical region.
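has_c() is the documented way to confirm that the C accelerators (the _cbson module gated by _USE_C above) are active:

import bson

if not bson.has_c():
    print("warning: falling back to the pure-Python BSON implementation")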
- os.register_at_fork(after_in_child=_after_fork) diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 6a33d492..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/_helpers.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/_helpers.cpython-312.pyc deleted file mode 100644 index 1b197795..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/_helpers.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/binary.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/binary.cpython-312.pyc deleted file mode 100644 index 3cbfbb6a..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/binary.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/code.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/code.cpython-312.pyc deleted file mode 100644 index 7a6e274b..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/code.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/codec_options.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/codec_options.cpython-312.pyc deleted file mode 100644 index d0351b4f..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/codec_options.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/datetime_ms.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/datetime_ms.cpython-312.pyc deleted file mode 100644 index 6d574b5b..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/datetime_ms.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/dbref.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/dbref.cpython-312.pyc deleted file mode 100644 index 48a4dc33..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/dbref.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/decimal128.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/decimal128.cpython-312.pyc deleted file mode 100644 index fa704643..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/decimal128.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/errors.cpython-312.pyc deleted file mode 100644 index d6e96cf6..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/errors.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/int64.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/int64.cpython-312.pyc deleted file mode 100644 index 7852a57a..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/int64.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/json_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/json_util.cpython-312.pyc deleted file mode 100644 index b8ed2121..00000000 Binary 
files a/venv/lib/python3.12/site-packages/bson/__pycache__/json_util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/max_key.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/max_key.cpython-312.pyc deleted file mode 100644 index 9d0eeebc..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/max_key.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/min_key.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/min_key.cpython-312.pyc deleted file mode 100644 index 7471a4a7..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/min_key.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/objectid.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/objectid.cpython-312.pyc deleted file mode 100644 index 3b7fbd3d..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/objectid.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/raw_bson.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/raw_bson.cpython-312.pyc deleted file mode 100644 index a3957292..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/raw_bson.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/regex.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/regex.cpython-312.pyc deleted file mode 100644 index c51e5cd8..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/regex.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/son.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/son.cpython-312.pyc deleted file mode 100644 index 182eb00f..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/son.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/timestamp.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/timestamp.cpython-312.pyc deleted file mode 100644 index 4d4d8857..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/timestamp.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/typings.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/typings.cpython-312.pyc deleted file mode 100644 index 7347a173..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/typings.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/__pycache__/tz_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/bson/__pycache__/tz_util.cpython-312.pyc deleted file mode 100644 index 2e1eeed4..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/__pycache__/tz_util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/bson/_cbson.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index 253250f0..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/bson/_cbson.cpython-311-x86_64-linux-gnu.so deleted file 
mode 100755 index d249f906..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-311-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/bson/_cbson.cpython-312-x86_64-linux-gnu.so deleted file mode 100755 index 1ce9c22c..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-312-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-39-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/bson/_cbson.cpython-39-x86_64-linux-gnu.so deleted file mode 100755 index 73711bed..00000000 Binary files a/venv/lib/python3.12/site-packages/bson/_cbson.cpython-39-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/bson/_cbsonmodule.c b/venv/lib/python3.12/site-packages/bson/_cbsonmodule.c deleted file mode 100644 index be91e417..00000000 --- a/venv/lib/python3.12/site-packages/bson/_cbsonmodule.c +++ /dev/null @@ -1,3274 +0,0 @@ -/* - * Copyright 2009-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* - * This file contains C implementations of some of the functions - * needed by the bson module. If possible, these implementations - * should be used to speed up BSON encoding and decoding. - */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#include "datetime.h" - -#include "buffer.h" -#include "time64.h" - -#define _CBSON_MODULE -#include "_cbsonmodule.h" - -/* New module state and initialization code. 
- * See the module-initialization-and-state - * section in the following doc: - * http://docs.python.org/release/3.1.3/howto/cporting.html - * which references the following pep: - * http://www.python.org/dev/peps/pep-3121/ - * */ -struct module_state { - PyObject* Binary; - PyObject* Code; - PyObject* ObjectId; - PyObject* DBRef; - PyObject* Regex; - PyObject* UUID; - PyObject* Timestamp; - PyObject* MinKey; - PyObject* MaxKey; - PyObject* UTC; - PyTypeObject* REType; - PyObject* BSONInt64; - PyObject* Decimal128; - PyObject* Mapping; - PyObject* DatetimeMS; - PyObject* min_datetime; - PyObject* max_datetime; - PyObject* replace_args; - PyObject* replace_kwargs; - PyObject* _type_marker_str; - PyObject* _flags_str; - PyObject* _pattern_str; - PyObject* _encoder_map_str; - PyObject* _decoder_map_str; - PyObject* _fallback_encoder_str; - PyObject* _raw_str; - PyObject* _subtype_str; - PyObject* _binary_str; - PyObject* _scope_str; - PyObject* _inc_str; - PyObject* _time_str; - PyObject* _bid_str; - PyObject* _replace_str; - PyObject* _astimezone_str; - PyObject* _id_str; - PyObject* _dollar_ref_str; - PyObject* _dollar_id_str; - PyObject* _dollar_db_str; - PyObject* _tzinfo_str; - PyObject* _as_doc_str; - PyObject* _utcoffset_str; - PyObject* _from_uuid_str; - PyObject* _as_uuid_str; - PyObject* _from_bid_str; - int64_t min_millis; - int64_t max_millis; -}; - -#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m)) - -/* Maximum number of regex flags */ -#define FLAGS_SIZE 7 - -/* Default UUID representation type code. */ -#define PYTHON_LEGACY 3 - -/* Other UUID representations. */ -#define STANDARD 4 -#define JAVA_LEGACY 5 -#define CSHARP_LEGACY 6 -#define UNSPECIFIED 0 - -#define BSON_MAX_SIZE 2147483647 -/* The smallest possible BSON document, i.e. "{}" */ -#define BSON_MIN_SIZE 5 - -/* Datetime codec options */ -#define DATETIME 1 -#define DATETIME_CLAMP 2 -#define DATETIME_MS 3 -#define DATETIME_AUTO 4 - -/* Converts integer to its string representation in decimal notation. 
*/ -extern int cbson_long_long_to_str(long long num, char* str, size_t size) { - // Buffer should fit 64-bit signed integer - if (size < 21) { - PyErr_Format( - PyExc_RuntimeError, - "Buffer too small to hold long long: %d < 21", size); - return -1; - } - int index = 0; - int sign = 1; - // Convert to unsigned to handle -LLONG_MIN overflow - unsigned long long absNum; - // Handle the case of 0 - if (num == 0) { - str[index++] = '0'; - str[index] = '\0'; - return 0; - } - // Handle negative numbers - if (num < 0) { - sign = -1; - absNum = 0ULL - (unsigned long long)num; - } else { - absNum = (unsigned long long)num; - } - // Convert the number to string - unsigned long long digit; - while (absNum > 0) { - digit = absNum % 10ULL; - str[index++] = (char)digit + '0'; // Convert digit to character - absNum /= 10; - } - // Add minus sign if negative - if (sign == -1) { - str[index++] = '-'; - } - str[index] = '\0'; // Null terminator - // Reverse the string - int start = 0; - int end = index - 1; - while (start < end) { - char temp = str[start]; - str[start++] = str[end]; - str[end--] = temp; - } - return 0; -} - -static PyObject* _test_long_long_to_str(PyObject* self, PyObject* args) { - // Test extreme values - Py_ssize_t maxNum = PY_SSIZE_T_MAX; - Py_ssize_t minNum = PY_SSIZE_T_MIN; - Py_ssize_t num; - char str_1[BUF_SIZE]; - char str_2[BUF_SIZE]; - int res = LL2STR(str_1, (long long)minNum); - if (res == -1) { - return NULL; - } - INT2STRING(str_2, (long long)minNum); - if (strcmp(str_1, str_2) != 0) { - PyErr_Format( - PyExc_RuntimeError, - "LL2STR != INT2STRING: %s != %s", str_1, str_2); - return NULL; - } - LL2STR(str_1, (long long)maxNum); - INT2STRING(str_2, (long long)maxNum); - if (strcmp(str_1, str_2) != 0) { - PyErr_Format( - PyExc_RuntimeError, - "LL2STR != INT2STRING: %s != %s", str_1, str_2); - return NULL; - } - - // Test common values - for (num = 0; num < 10000; num++) { - char str_1[BUF_SIZE]; - char str_2[BUF_SIZE]; - LL2STR(str_1, (long long)num); - INT2STRING(str_2, (long long)num); - if (strcmp(str_1, str_2) != 0) { - PyErr_Format( - PyExc_RuntimeError, - "LL2STR != INT2STRING: %s != %s", str_1, str_2); - return NULL; - } - } - - return args; -} - -/* Get an error class from the bson.errors module. - * - * Returns a new ref */ -static PyObject* _error(char* name) { - PyObject* error = NULL; - PyObject* errors = PyImport_ImportModule("bson.errors"); - if (!errors) { - return NULL; - } - error = PyObject_GetAttrString(errors, name); - Py_DECREF(errors); - return error; -} - -/* Safely downcast from Py_ssize_t to int, setting an - * exception and returning -1 on error. */ -static int -_downcast_and_check(Py_ssize_t size, uint8_t extra) { - if (size > BSON_MAX_SIZE || ((BSON_MAX_SIZE - extra) < size)) { - PyObject* InvalidStringData = _error("InvalidStringData"); - if (InvalidStringData) { - PyErr_SetString(InvalidStringData, - "String length must be <= 2147483647"); - Py_DECREF(InvalidStringData); - } - return -1; - } - return (int)size + extra; -} - -static PyObject* elements_to_dict(PyObject* self, const char* string, - unsigned max, - const codec_options_t* options); - -static int _write_element_to_buffer(PyObject* self, buffer_t buffer, - int type_byte, PyObject* value, - unsigned char check_keys, - const codec_options_t* options, - unsigned char in_custom_call, - unsigned char in_fallback_call); - -/* Write a RawBSONDocument to the buffer. - * Returns the number of bytes written or 0 on failure. 
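A Python transliteration of cbson_long_long_to_str above, to make the sign handling and digit reversal easy to follow (real Python code would simply call str(num); the unsigned absNum trick exists only to survive LLONG_MIN in C):

def long_long_to_str(num: int) -> str:
    if num == 0:
        return "0"
    neg = num < 0
    n = -num if neg else num  # the C code negates via unsigned arithmetic
    digits = []
    while n > 0:
        digits.append(chr(ord("0") + n % 10))  # least-significant digit first
        n //= 10
    if neg:
        digits.append("-")
    return "".join(reversed(digits))  # mirrors the in-place reversal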
- */ -static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw); - -/* Date stuff */ -static PyObject* datetime_from_millis(long long millis) { - /* To encode a datetime instance like datetime(9999, 12, 31, 23, 59, 59, 999999) - * we follow these steps: - * 1. Calculate a timestamp in seconds: 253402300799 - * 2. Multiply that by 1000: 253402300799000 - * 3. Add in microseconds divided by 1000 253402300799999 - * - * (Note: BSON doesn't support microsecond accuracy, hence the truncation.) - * - * To decode we could do: - * 1. Get seconds: timestamp / 1000: 253402300799 - * 2. Get micros: (timestamp % 1000) * 1000: 999000 - * Resulting in datetime(9999, 12, 31, 23, 59, 59, 999000) -- the expected result - * - * Now what if the we encode (1, 1, 1, 1, 1, 1, 111111)? - * 1. and 2. gives: -62135593139000 - * 3. Gives us: -62135593138889 - * - * Now decode: - * 1. Gives us: -62135593138 - * 2. Gives us: -889000 - * Resulting in datetime(1, 1, 1, 1, 1, 2, 15888216) -- an invalid result - * - * If instead to decode we do: - * diff = ((millis % 1000) + 1000) % 1000: 111 - * seconds = (millis - diff) / 1000: -62135593139 - * micros = diff * 1000 111000 - * Resulting in datetime(1, 1, 1, 1, 1, 1, 111000) -- the expected result - */ - PyObject* datetime = NULL; - int diff = (int)(((millis % 1000) + 1000) % 1000); - int microseconds = diff * 1000; - Time64_T seconds = (millis - diff) / 1000; - struct TM timeinfo; - cbson_gmtime64_r(&seconds, &timeinfo); - - datetime = PyDateTime_FromDateAndTime(timeinfo.tm_year + 1900, - timeinfo.tm_mon + 1, - timeinfo.tm_mday, - timeinfo.tm_hour, - timeinfo.tm_min, - timeinfo.tm_sec, - microseconds); - if(!datetime) { - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - - /* - * Calling _error clears the error state, so fetch it first. - */ - PyErr_Fetch(&etype, &evalue, &etrace); - - /* Only add addition error message on ValueError exceptions. */ - if (PyErr_GivenExceptionMatches(etype, PyExc_ValueError)) { - if (evalue) { - PyObject* err_msg = PyObject_Str(evalue); - if (err_msg) { - PyObject* appendage = PyUnicode_FromString(" (Consider Using CodecOptions(datetime_conversion=DATETIME_AUTO) or MongoClient(datetime_conversion='DATETIME_AUTO')). See: https://www.mongodb.com/docs/languages/python/pymongo-driver/current/data-formats/dates-and-times/#handling-out-of-range-datetimes"); - if (appendage) { - PyObject* msg = PyUnicode_Concat(err_msg, appendage); - if (msg) { - Py_DECREF(evalue); - evalue = msg; - } - } - Py_XDECREF(appendage); - } - Py_XDECREF(err_msg); - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } - /* Steals references to args. */ - PyErr_Restore(etype, evalue, etrace); - } - return datetime; -} - -static long long millis_from_datetime(PyObject* datetime) { - struct TM timeinfo; - long long millis; - - timeinfo.tm_year = PyDateTime_GET_YEAR(datetime) - 1900; - timeinfo.tm_mon = PyDateTime_GET_MONTH(datetime) - 1; - timeinfo.tm_mday = PyDateTime_GET_DAY(datetime); - timeinfo.tm_hour = PyDateTime_DATE_GET_HOUR(datetime); - timeinfo.tm_min = PyDateTime_DATE_GET_MINUTE(datetime); - timeinfo.tm_sec = PyDateTime_DATE_GET_SECOND(datetime); - - millis = cbson_timegm64(&timeinfo) * 1000; - millis += PyDateTime_DATE_GET_MICROSECOND(datetime) / 1000; - return millis; -} - -/* Extended-range datetime, returns a DatetimeMS object with millis */ -static PyObject* datetime_ms_from_millis(PyObject* self, long long millis){ - // Allocate a new DatetimeMS object. 
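The decode arithmetic described in the comment above checks out directly in Python; the value is the one worked through in the comment:

from datetime import datetime, timedelta

millis = -62135593138889  # datetime(1, 1, 1, 1, 1, 1, 111111), truncated to ms
diff = ((millis % 1000) + 1000) % 1000  # 111
seconds = (millis - diff) // 1000       # -62135593139
micros = diff * 1000                    # 111000

print(datetime(1970, 1, 1) + timedelta(seconds=seconds, microseconds=micros))
# 0001-01-01 01:01:01.111000, the expected result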
- struct module_state *state = GETSTATE(self); - if (!state) { - return NULL; - } - - PyObject* dt = NULL; - PyObject* ll_millis = NULL; - - if (!(ll_millis = PyLong_FromLongLong(millis))){ - return NULL; - } - dt = PyObject_CallFunctionObjArgs(state->DatetimeMS, ll_millis, NULL); - Py_DECREF(ll_millis); - return dt; -} - -/* Extended-range datetime, takes a DatetimeMS object and extracts the long long value. */ -static int millis_from_datetime_ms(PyObject* dt, long long* out){ - PyObject* ll_millis; - long long millis; - - if (!(ll_millis = PyNumber_Long(dt))){ - return 0; - } - millis = PyLong_AsLongLong(ll_millis); - Py_DECREF(ll_millis); - if (millis == -1 && PyErr_Occurred()) { /* Overflow */ - PyErr_SetString(PyExc_OverflowError, - "MongoDB datetimes can only handle up to 8-byte ints"); - return 0; - } - *out = millis; - return 1; -} - -static PyObject* decode_datetime(PyObject* self, long long millis, const codec_options_t* options){ - PyObject* naive = NULL; - PyObject* replace = NULL; - PyObject* value = NULL; - struct module_state *state = GETSTATE(self); - if (!state) { - goto invalid; - } - if (options->datetime_conversion == DATETIME_MS){ - return datetime_ms_from_millis(self, millis); - } - - int dt_clamp = options->datetime_conversion == DATETIME_CLAMP; - int dt_auto = options->datetime_conversion == DATETIME_AUTO; - - if (dt_clamp || dt_auto){ - int64_t min_millis = state->min_millis; - int64_t max_millis = state->max_millis; - int64_t min_millis_offset = 0; - int64_t max_millis_offset = 0; - if (options->tz_aware && options->tzinfo && options->tzinfo != Py_None) { - PyObject* utcoffset = PyObject_CallMethodObjArgs(options->tzinfo, state->_utcoffset_str, state->min_datetime, NULL); - if (utcoffset == NULL) { - return 0; - } - if (utcoffset != Py_None) { - if (!PyDelta_Check(utcoffset)) { - PyObject* BSONError = _error("BSONError"); - if (BSONError) { - PyErr_SetString(BSONError, "tzinfo.utcoffset() did not return a datetime.timedelta"); - Py_DECREF(BSONError); - } - Py_DECREF(utcoffset); - return 0; - } - min_millis_offset = (PyDateTime_DELTA_GET_DAYS(utcoffset) * (int64_t)86400 + - PyDateTime_DELTA_GET_SECONDS(utcoffset)) * (int64_t)1000 + - (PyDateTime_DELTA_GET_MICROSECONDS(utcoffset) / 1000); - } - Py_DECREF(utcoffset); - utcoffset = PyObject_CallMethodObjArgs(options->tzinfo, state->_utcoffset_str, state->max_datetime, NULL); - if (utcoffset == NULL) { - return 0; - } - if (utcoffset != Py_None) { - if (!PyDelta_Check(utcoffset)) { - PyObject* BSONError = _error("BSONError"); - if (BSONError) { - PyErr_SetString(BSONError, "tzinfo.utcoffset() did not return a datetime.timedelta"); - Py_DECREF(BSONError); - } - Py_DECREF(utcoffset); - return 0; - } - max_millis_offset = (PyDateTime_DELTA_GET_DAYS(utcoffset) * (int64_t)86400 + - PyDateTime_DELTA_GET_SECONDS(utcoffset)) * (int64_t)1000 + - (PyDateTime_DELTA_GET_MICROSECONDS(utcoffset) / 1000); - } - Py_DECREF(utcoffset); - } - if (min_millis_offset < 0) { - min_millis -= min_millis_offset; - } - - if (max_millis_offset > 0) { - max_millis -= max_millis_offset; - } - - if (dt_clamp) { - if (millis < min_millis) { - millis = min_millis; - } else if (millis > max_millis) { - millis = max_millis; - } - // Continues from here to return a datetime. - } else { // dt_auto - if (millis < min_millis || millis > max_millis){ - return datetime_ms_from_millis(self, millis); - } - } - } - - naive = datetime_from_millis(millis); - if (!naive) { - goto invalid; - } - - if (!options->tz_aware) { /* In the naive case, we're done here. 
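The DATETIME_CLAMP / DATETIME_AUTO branches above are driven by the public datetime_conversion option. A sketch with a value one millisecond below datetime.min (the "when" key is illustrative):

import bson
from bson.codec_options import CodecOptions, DatetimeConversion
from bson.datetime_ms import DatetimeMS

data = bson.encode({"when": DatetimeMS(-62135596800001)})  # 1 ms before datetime.min

auto = bson.decode(data, codec_options=CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_AUTO))
clamp = bson.decode(data, codec_options=CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_CLAMP))
# auto["when"] stays a DatetimeMS; clamp["when"] is clamped to datetime.min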
*/ - return naive; - } - replace = PyObject_GetAttr(naive, state->_replace_str); - if (!replace) { - goto invalid; - } - value = PyObject_Call(replace, state->replace_args, state->replace_kwargs); - if (!value) { - goto invalid; - } - - /* convert to local time */ - if (options->tzinfo != Py_None) { - PyObject* temp = PyObject_CallMethodObjArgs(value, state->_astimezone_str, options->tzinfo, NULL); - Py_DECREF(value); - value = temp; - } -invalid: - Py_XDECREF(naive); - Py_XDECREF(replace); - return value; -} - -/* Just make this compatible w/ the old API. */ -int buffer_write_bytes(buffer_t buffer, const char* data, int size) { - if (pymongo_buffer_write(buffer, data, size)) { - return 0; - } - return 1; -} - -int buffer_write_double(buffer_t buffer, double data) { - double data_le = BSON_DOUBLE_TO_LE(data); - return buffer_write_bytes(buffer, (const char*)&data_le, 8); -} - -int buffer_write_int32(buffer_t buffer, int32_t data) { - uint32_t data_le = BSON_UINT32_TO_LE(data); - return buffer_write_bytes(buffer, (const char*)&data_le, 4); -} - -int buffer_write_int64(buffer_t buffer, int64_t data) { - uint64_t data_le = BSON_UINT64_TO_LE(data); - return buffer_write_bytes(buffer, (const char*)&data_le, 8); -} - -void buffer_write_int32_at_position(buffer_t buffer, - int position, - int32_t data) { - uint32_t data_le = BSON_UINT32_TO_LE(data); - memcpy(pymongo_buffer_get_buffer(buffer) + position, &data_le, 4); -} - -static int write_unicode(buffer_t buffer, PyObject* py_string) { - int size; - const char* data; - PyObject* encoded = PyUnicode_AsUTF8String(py_string); - if (!encoded) { - return 0; - } - data = PyBytes_AS_STRING(encoded); - if (!data) - goto unicodefail; - - if ((size = _downcast_and_check(PyBytes_GET_SIZE(encoded), 1)) == -1) - goto unicodefail; - - if (!buffer_write_int32(buffer, (int32_t)size)) - goto unicodefail; - - if (!buffer_write_bytes(buffer, data, size)) - goto unicodefail; - - Py_DECREF(encoded); - return 1; - -unicodefail: - Py_DECREF(encoded); - return 0; -} - -/* returns 0 on failure */ -static int write_string(buffer_t buffer, PyObject* py_string) { - int size; - const char* data; - if (PyUnicode_Check(py_string)){ - return write_unicode(buffer, py_string); - } - data = PyBytes_AsString(py_string); - if (!data) { - return 0; - } - - if ((size = _downcast_and_check(PyBytes_Size(py_string), 1)) == -1) - return 0; - - if (!buffer_write_int32(buffer, (int32_t)size)) { - return 0; - } - if (!buffer_write_bytes(buffer, data, size)) { - return 0; - } - return 1; -} - -/* Load a Python object to cache. - * - * Returns non-zero on failure. */ -static int _load_object(PyObject** object, char* module_name, char* object_name) { - PyObject* module; - - module = PyImport_ImportModule(module_name); - if (!module) { - return 1; - } - - *object = PyObject_GetAttrString(module, object_name); - Py_DECREF(module); - - return (*object) ? 0 : 2; -} - -/* Load all Python objects to cache. - * - * Returns non-zero on failure. */ -static int _load_python_objects(PyObject* module) { - PyObject* empty_string = NULL; - PyObject* re_compile = NULL; - PyObject* compiled = NULL; - PyObject* min_datetime_ms = NULL; - PyObject* max_datetime_ms = NULL; - struct module_state *state = GETSTATE(module); - if (!state) { - return 1; - } - - /* Cache commonly used attribute names to improve performance. 
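These little-endian writers are the C counterparts of the struct packers the pure-Python module relies on (e.g. the _PACK_INT seen earlier in bson/__init__.py):

import struct

_PACK_INT = struct.Struct("<i").pack   # int32, little-endian
_PACK_LONG = struct.Struct("<q").pack  # int64, little-endian

assert _PACK_INT(5) == b"\x05\x00\x00\x00"
assert _PACK_LONG(5) == b"\x05" + b"\x00" * 7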
*/ - if (!((state->_type_marker_str = PyUnicode_FromString("_type_marker")) && - (state->_flags_str = PyUnicode_FromString("flags")) && - (state->_pattern_str = PyUnicode_FromString("pattern")) && - (state->_encoder_map_str = PyUnicode_FromString("_encoder_map")) && - (state->_decoder_map_str = PyUnicode_FromString("_decoder_map")) && - (state->_fallback_encoder_str = PyUnicode_FromString("_fallback_encoder")) && - (state->_raw_str = PyUnicode_FromString("raw")) && - (state->_subtype_str = PyUnicode_FromString("subtype")) && - (state->_binary_str = PyUnicode_FromString("binary")) && - (state->_scope_str = PyUnicode_FromString("scope")) && - (state->_inc_str = PyUnicode_FromString("inc")) && - (state->_time_str = PyUnicode_FromString("time")) && - (state->_bid_str = PyUnicode_FromString("bid")) && - (state->_replace_str = PyUnicode_FromString("replace")) && - (state->_astimezone_str = PyUnicode_FromString("astimezone")) && - (state->_id_str = PyUnicode_FromString("_id")) && - (state->_dollar_ref_str = PyUnicode_FromString("$ref")) && - (state->_dollar_id_str = PyUnicode_FromString("$id")) && - (state->_dollar_db_str = PyUnicode_FromString("$db")) && - (state->_tzinfo_str = PyUnicode_FromString("tzinfo")) && - (state->_as_doc_str = PyUnicode_FromString("as_doc")) && - (state->_utcoffset_str = PyUnicode_FromString("utcoffset")) && - (state->_from_uuid_str = PyUnicode_FromString("from_uuid")) && - (state->_as_uuid_str = PyUnicode_FromString("as_uuid")) && - (state->_from_bid_str = PyUnicode_FromString("from_bid")))) { - return 1; - } - - if (_load_object(&state->Binary, "bson.binary", "Binary") || - _load_object(&state->Code, "bson.code", "Code") || - _load_object(&state->ObjectId, "bson.objectid", "ObjectId") || - _load_object(&state->DBRef, "bson.dbref", "DBRef") || - _load_object(&state->Timestamp, "bson.timestamp", "Timestamp") || - _load_object(&state->MinKey, "bson.min_key", "MinKey") || - _load_object(&state->MaxKey, "bson.max_key", "MaxKey") || - _load_object(&state->UTC, "bson.tz_util", "utc") || - _load_object(&state->Regex, "bson.regex", "Regex") || - _load_object(&state->BSONInt64, "bson.int64", "Int64") || - _load_object(&state->Decimal128, "bson.decimal128", "Decimal128") || - _load_object(&state->UUID, "uuid", "UUID") || - _load_object(&state->Mapping, "collections.abc", "Mapping") || - _load_object(&state->DatetimeMS, "bson.datetime_ms", "DatetimeMS") || - _load_object(&min_datetime_ms, "bson.datetime_ms", "_MIN_UTC_MS") || - _load_object(&max_datetime_ms, "bson.datetime_ms", "_MAX_UTC_MS") || - _load_object(&state->min_datetime, "bson.datetime_ms", "_MIN_UTC") || - _load_object(&state->max_datetime, "bson.datetime_ms", "_MAX_UTC")) { - return 1; - } - - state->min_millis = PyLong_AsLongLong(min_datetime_ms); - state->max_millis = PyLong_AsLongLong(max_datetime_ms); - Py_DECREF(min_datetime_ms); - Py_DECREF(max_datetime_ms); - if ((state->min_millis == -1 || state->max_millis == -1) && PyErr_Occurred()) { - return 1; - } - - /* Speed up datetime.replace(tzinfo=utc) call */ - state->replace_args = PyTuple_New(0); - if (!state->replace_args) { - return 1; - } - state->replace_kwargs = PyDict_New(); - if (!state->replace_kwargs) { - return 1; - } - if (PyDict_SetItem(state->replace_kwargs, state->_tzinfo_str, state->UTC) == -1) { - return 1; - } - - /* Reload our REType hack too. 
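The "REType hack" being reloaded here is the same trick the pure-Python module uses for RE_TYPE: the compiled-pattern type has no public name, so compile an empty pattern and take its type:

import re

RE_TYPE = type(re.compile(""))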
*/ - empty_string = PyBytes_FromString(""); - if (empty_string == NULL) { - state->REType = NULL; - return 1; - } - - if (_load_object(&re_compile, "re", "compile")) { - state->REType = NULL; - Py_DECREF(empty_string); - return 1; - } - - compiled = PyObject_CallFunction(re_compile, "O", empty_string); - Py_DECREF(re_compile); - if (compiled == NULL) { - state->REType = NULL; - Py_DECREF(empty_string); - return 1; - } - Py_INCREF(Py_TYPE(compiled)); - state->REType = Py_TYPE(compiled); - Py_DECREF(empty_string); - Py_DECREF(compiled); - return 0; -} - -/* - * Get the _type_marker from an Object. - * - * Return the type marker, 0 if there is no marker, or -1 on failure. - */ -static long _type_marker(PyObject* object, PyObject* _type_marker_str) { - PyObject* type_marker = NULL; - long type = 0; - - if (PyObject_HasAttr(object, _type_marker_str)) { - type_marker = PyObject_GetAttr(object, _type_marker_str); - if (type_marker == NULL) { - return -1; - } - } - - /* - * Python objects with broken __getattr__ implementations could return - * arbitrary types for a call to PyObject_GetAttrString. For example - * pymongo.database.Database returns a new Collection instance for - * __getattr__ calls with names that don't match an existing attribute - * or method. In some cases "value" could be a subtype of something - * we know how to serialize. Make a best effort to encode these types. - */ - if (type_marker && PyLong_CheckExact(type_marker)) { - type = PyLong_AsLong(type_marker); - Py_DECREF(type_marker); - } else { - Py_XDECREF(type_marker); - } - - return type; -} - -/* Fill out a type_registry_t* from a TypeRegistry object. - * - * Return 1 on success. options->document_class is a new reference. - * Return 0 on failure. - */ -int cbson_convert_type_registry(PyObject* registry_obj, type_registry_t* registry, PyObject* _encoder_map_str, PyObject* _decoder_map_str, PyObject* _fallback_encoder_str) { - registry->encoder_map = NULL; - registry->decoder_map = NULL; - registry->fallback_encoder = NULL; - registry->registry_obj = NULL; - - registry->encoder_map = PyObject_GetAttr(registry_obj, _encoder_map_str); - if (registry->encoder_map == NULL) { - goto fail; - } - registry->is_encoder_empty = (PyDict_Size(registry->encoder_map) == 0); - - registry->decoder_map = PyObject_GetAttr(registry_obj, _decoder_map_str); - if (registry->decoder_map == NULL) { - goto fail; - } - registry->is_decoder_empty = (PyDict_Size(registry->decoder_map) == 0); - - registry->fallback_encoder = PyObject_GetAttr(registry_obj, _fallback_encoder_str); - if (registry->fallback_encoder == NULL) { - goto fail; - } - registry->has_fallback_encoder = (registry->fallback_encoder != Py_None); - - registry->registry_obj = registry_obj; - Py_INCREF(registry->registry_obj); - return 1; - -fail: - Py_XDECREF(registry->encoder_map); - Py_XDECREF(registry->decoder_map); - Py_XDECREF(registry->fallback_encoder); - return 0; -} - -/* Fill out a codec_options_t* from a CodecOptions object. - * - * Return 1 on success. options->document_class is a new reference. - * Return 0 on failure. 
- */ -int convert_codec_options(PyObject* self, PyObject* options_obj, codec_options_t* options) { - PyObject* type_registry_obj = NULL; - struct module_state *state = GETSTATE(self); - long type_marker; - if (!state) { - return 0; - } - - options->unicode_decode_error_handler = NULL; - - if (!PyArg_ParseTuple(options_obj, "ObbzOOb", - &options->document_class, - &options->tz_aware, - &options->uuid_rep, - &options->unicode_decode_error_handler, - &options->tzinfo, - &type_registry_obj, - &options->datetime_conversion)) { - return 0; - } - - type_marker = _type_marker(options->document_class, - state->_type_marker_str); - if (type_marker < 0) { - return 0; - } - - if (!cbson_convert_type_registry(type_registry_obj, - &options->type_registry, state->_encoder_map_str, state->_decoder_map_str, state->_fallback_encoder_str)) { - return 0; - } - - options->is_raw_bson = (101 == type_marker); - options->options_obj = options_obj; - - Py_INCREF(options->options_obj); - Py_INCREF(options->document_class); - Py_INCREF(options->tzinfo); - - return 1; -} - -void destroy_codec_options(codec_options_t* options) { - Py_CLEAR(options->document_class); - Py_CLEAR(options->tzinfo); - Py_CLEAR(options->options_obj); - Py_CLEAR(options->type_registry.registry_obj); - Py_CLEAR(options->type_registry.encoder_map); - Py_CLEAR(options->type_registry.decoder_map); - Py_CLEAR(options->type_registry.fallback_encoder); -} - -static int write_element_to_buffer(PyObject* self, buffer_t buffer, - int type_byte, PyObject* value, - unsigned char check_keys, - const codec_options_t* options, - unsigned char in_custom_call, - unsigned char in_fallback_call) { - int result = 0; - if(Py_EnterRecursiveCall(" while encoding an object to BSON ")) { - return 0; - } - result = _write_element_to_buffer(self, buffer, type_byte, - value, check_keys, options, - in_custom_call, in_fallback_call); - Py_LeaveRecursiveCall(); - return result; -} - -static void -_set_cannot_encode(PyObject* value) { - if (PyLong_Check(value)) { - if ((PyLong_AsLongLong(value) == -1) && PyErr_Occurred()) { - return PyErr_SetString(PyExc_OverflowError, - "MongoDB can only handle up to 8-byte ints"); - } - } - - PyObject* type = NULL; - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument == NULL) { - goto error; - } - - type = PyObject_Type(value); - if (type == NULL) { - goto error; - } - PyErr_Format(InvalidDocument, "cannot encode object: %R, of type: %R", - value, type); -error: - Py_XDECREF(type); - Py_XDECREF(InvalidDocument); -} - -/* - * Encode a builtin Python regular expression or our custom Regex class. - * - * Sets exception and returns 0 on failure. - */ -static int _write_regex_to_buffer( - buffer_t buffer, int type_byte, PyObject* value, PyObject* _flags_str, PyObject* _pattern_str) { - - PyObject* py_flags; - PyObject* py_pattern; - PyObject* encoded_pattern; - PyObject* decoded_pattern; - long int_flags; - char flags[FLAGS_SIZE]; - char check_utf8 = 0; - const char* pattern_data; - int pattern_length, flags_length; - - /* - * Both the builtin re type and our Regex class have attributes - * "flags" and "pattern". 
- */ - py_flags = PyObject_GetAttr(value, _flags_str); - if (!py_flags) { - return 0; - } - int_flags = PyLong_AsLong(py_flags); - Py_DECREF(py_flags); - if (int_flags == -1 && PyErr_Occurred()) { - return 0; - } - py_pattern = PyObject_GetAttr(value, _pattern_str); - if (!py_pattern) { - return 0; - } - - if (PyUnicode_Check(py_pattern)) { - encoded_pattern = PyUnicode_AsUTF8String(py_pattern); - Py_DECREF(py_pattern); - if (!encoded_pattern) { - return 0; - } - } else { - encoded_pattern = py_pattern; - check_utf8 = 1; - } - - if (!(pattern_data = PyBytes_AsString(encoded_pattern))) { - Py_DECREF(encoded_pattern); - return 0; - } - if ((pattern_length = _downcast_and_check(PyBytes_Size(encoded_pattern), 0)) == -1) { - Py_DECREF(encoded_pattern); - return 0; - } - - if (strlen(pattern_data) != (size_t) pattern_length){ - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument) { - PyErr_SetString(InvalidDocument, - "regex patterns must not contain the NULL byte"); - Py_DECREF(InvalidDocument); - } - Py_DECREF(encoded_pattern); - return 0; - } - - if (check_utf8) { - decoded_pattern = PyUnicode_DecodeUTF8(pattern_data, (Py_ssize_t) pattern_length, NULL); - if (decoded_pattern == NULL) { - PyErr_Clear(); - PyObject* InvalidStringData = _error("InvalidStringData"); - if (InvalidStringData) { - PyErr_SetString(InvalidStringData, - "regex patterns must be valid UTF-8"); - Py_DECREF(InvalidStringData); - } - Py_DECREF(encoded_pattern); - return 0; - } - Py_DECREF(decoded_pattern); - } - - if (!buffer_write_bytes(buffer, pattern_data, pattern_length + 1)) { - Py_DECREF(encoded_pattern); - return 0; - } - Py_DECREF(encoded_pattern); - - flags[0] = 0; - - if (int_flags & 2) { - STRCAT(flags, FLAGS_SIZE, "i"); - } - if (int_flags & 4) { - STRCAT(flags, FLAGS_SIZE, "l"); - } - if (int_flags & 8) { - STRCAT(flags, FLAGS_SIZE, "m"); - } - if (int_flags & 16) { - STRCAT(flags, FLAGS_SIZE, "s"); - } - if (int_flags & 32) { - STRCAT(flags, FLAGS_SIZE, "u"); - } - if (int_flags & 64) { - STRCAT(flags, FLAGS_SIZE, "x"); - } - flags_length = (int)strlen(flags) + 1; - if (!buffer_write_bytes(buffer, flags, flags_length)) { - return 0; - } - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0B; - return 1; -} - -/* Write a single value to the buffer (also write its type_byte, for which - * space has already been reserved. - * - * returns 0 on failure */ -static int _write_element_to_buffer(PyObject* self, buffer_t buffer, - int type_byte, PyObject* value, - unsigned char check_keys, - const codec_options_t* options, - unsigned char in_custom_call, - unsigned char in_fallback_call) { - PyObject* new_value = NULL; - int retval; - int is_list; - long type; - struct module_state *state = GETSTATE(self); - if (!state) { - return 0; - } - /* - * Use _type_marker attribute instead of PyObject_IsInstance for better perf. 
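Seen from the Python side, the mapping above turns re flags into BSON regex option characters. A sketch (the "r" key is illustrative):

import re

from bson import encode
from bson.regex import Regex

regex = Regex.from_native(re.compile(r"^py", re.IGNORECASE | re.MULTILINE))
data = encode({"r": regex})
# element type byte 0x0B; the options contain "i" and "m" (plus "u", which
# Python sets implicitly on str patterns via re.UNICODE)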
- */ - type = _type_marker(value, state->_type_marker_str); - if (type < 0) { - return 0; - } - - switch (type) { - case 5: - { - /* Binary */ - PyObject* subtype_object; - char subtype; - const char* data; - int size; - - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x05; - subtype_object = PyObject_GetAttr(value, state->_subtype_str); - if (!subtype_object) { - return 0; - } - subtype = (char)PyLong_AsLong(subtype_object); - if (subtype == -1) { - Py_DECREF(subtype_object); - return 0; - } - size = _downcast_and_check(PyBytes_Size(value), 0); - if (size == -1) { - Py_DECREF(subtype_object); - return 0; - } - - Py_DECREF(subtype_object); - if (subtype == 2) { - int other_size = _downcast_and_check(PyBytes_Size(value), 4); - if (other_size == -1) - return 0; - if (!buffer_write_int32(buffer, other_size)) { - return 0; - } - if (!buffer_write_bytes(buffer, &subtype, 1)) { - return 0; - } - } - if (!buffer_write_int32(buffer, size)) { - return 0; - } - if (subtype != 2) { - if (!buffer_write_bytes(buffer, &subtype, 1)) { - return 0; - } - } - data = PyBytes_AsString(value); - if (!data) { - return 0; - } - if (!buffer_write_bytes(buffer, data, size)) { - return 0; - } - return 1; - } - case 7: - { - /* ObjectId */ - const char* data; - PyObject* pystring = PyObject_GetAttr(value, state->_binary_str); - if (!pystring) { - return 0; - } - data = PyBytes_AsString(pystring); - if (!data) { - Py_DECREF(pystring); - return 0; - } - if (!buffer_write_bytes(buffer, data, 12)) { - Py_DECREF(pystring); - return 0; - } - Py_DECREF(pystring); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x07; - return 1; - } - case 9: - { - /* DatetimeMS */ - long long millis; - if (!millis_from_datetime_ms(value, &millis)) { - return 0; - } - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x09; - return buffer_write_int64(buffer, (int64_t)millis); - } - case 11: - { - /* Regex */ - return _write_regex_to_buffer(buffer, type_byte, value, state->_flags_str, state->_pattern_str); - } - case 13: - { - /* Code */ - int start_position, - length_location, - length; - - PyObject* scope = PyObject_GetAttr(value, state->_scope_str); - if (!scope) { - return 0; - } - - if (scope == Py_None) { - Py_DECREF(scope); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0D; - return write_string(buffer, value); - } - - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0F; - - start_position = pymongo_buffer_get_position(buffer); - /* save space for length */ - length_location = pymongo_buffer_save_space(buffer, 4); - if (length_location == -1) { - Py_DECREF(scope); - return 0; - } - - if (!write_string(buffer, value)) { - Py_DECREF(scope); - return 0; - } - - if (!write_dict(self, buffer, scope, 0, options, 0)) { - Py_DECREF(scope); - return 0; - } - Py_DECREF(scope); - - length = pymongo_buffer_get_position(buffer) - start_position; - buffer_write_int32_at_position( - buffer, length_location, (int32_t)length); - return 1; - } - case 17: - { - /* Timestamp */ - PyObject* obj; - unsigned long i; - - obj = PyObject_GetAttr(value, state->_inc_str); - if (!obj) { - return 0; - } - i = PyLong_AsUnsignedLong(obj); - Py_DECREF(obj); - if (i == (unsigned long)-1 && PyErr_Occurred()) { - return 0; - } - if (!buffer_write_int32(buffer, (int32_t)i)) { - return 0; - } - - obj = PyObject_GetAttr(value, state->_time_str); - if (!obj) { - return 0; - } - i = PyLong_AsUnsignedLong(obj); - Py_DECREF(obj); - if (i == (unsigned long)-1 && PyErr_Occurred()) { - return 0; - } - if (!buffer_write_int32(buffer, (int32_t)i)) { - return 
0; - } - - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x11; - return 1; - } - case 18: - { - /* Int64 */ - const long long ll = PyLong_AsLongLong(value); - if (PyErr_Occurred()) { /* Overflow */ - PyErr_SetString(PyExc_OverflowError, - "MongoDB can only handle up to 8-byte ints"); - return 0; - } - if (!buffer_write_int64(buffer, (int64_t)ll)) { - return 0; - } - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x12; - return 1; - } - case 19: - { - /* Decimal128 */ - const char* data; - PyObject* pystring = PyObject_GetAttr(value, state->_bid_str); - if (!pystring) { - return 0; - } - data = PyBytes_AsString(pystring); - if (!data) { - Py_DECREF(pystring); - return 0; - } - if (!buffer_write_bytes(buffer, data, 16)) { - Py_DECREF(pystring); - return 0; - } - Py_DECREF(pystring); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x13; - return 1; - } - case 100: - { - /* DBRef */ - PyObject* as_doc = PyObject_CallMethodObjArgs(value, state->_as_doc_str, NULL); - if (!as_doc) { - return 0; - } - if (!write_dict(self, buffer, as_doc, 0, options, 0)) { - Py_DECREF(as_doc); - return 0; - } - Py_DECREF(as_doc); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; - return 1; - } - case 101: - { - /* RawBSONDocument */ - if (!write_raw_doc(buffer, value, state->_raw_str)) { - return 0; - } - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; - return 1; - } - case 255: - { - /* MinKey */ - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0xFF; - return 1; - } - case 127: - { - /* MaxKey */ - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x7F; - return 1; - } - } - - /* No _type_marker attribute or not one of our types. */ - - if (PyBool_Check(value)) { - const char c = (value == Py_True) ? 0x01 : 0x00; - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x08; - return buffer_write_bytes(buffer, &c, 1); - } - else if (PyLong_Check(value)) { - const long long long_long_value = PyLong_AsLongLong(value); - if (long_long_value == -1 && PyErr_Occurred()) { - /* Ignore error and give the fallback_encoder a chance. 
*/ - PyErr_Clear(); - } else if (-2147483648LL <= long_long_value && long_long_value <= 2147483647LL) { - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x10; - return buffer_write_int32(buffer, (int32_t)long_long_value); - } else { - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x12; - return buffer_write_int64(buffer, (int64_t)long_long_value); - } - } else if (PyFloat_Check(value)) { - const double d = PyFloat_AsDouble(value); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x01; - return buffer_write_double(buffer, d); - } else if (value == Py_None) { - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0A; - return 1; - } else if (PyDict_Check(value)) { - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; - return write_dict(self, buffer, value, check_keys, options, 0); - } else if ((is_list = PyList_Check(value)) || PyTuple_Check(value)) { - Py_ssize_t items, i; - int start_position, - length_location, - length; - char zero = 0; - - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x04; - start_position = pymongo_buffer_get_position(buffer); - - /* save space for length */ - length_location = pymongo_buffer_save_space(buffer, 4); - if (length_location == -1) { - return 0; - } - if (is_list) { - items = PyList_Size(value); - } else { - items = PyTuple_Size(value); - } - if (items > BSON_MAX_SIZE) { - PyObject* BSONError = _error("BSONError"); - if (BSONError) { - PyErr_SetString(BSONError, - "Too many items to serialize."); - Py_DECREF(BSONError); - } - return 0; - } - for(i = 0; i < items; i++) { - int list_type_byte = pymongo_buffer_save_space(buffer, 1); - char name[BUF_SIZE]; - PyObject* item_value; - - if (list_type_byte == -1) { - return 0; - } - int res = LL2STR(name, (long long)i); - if (res == -1) { - return 0; - } - if (!buffer_write_bytes(buffer, name, (int)strlen(name) + 1)) { - return 0; - } - if (is_list) { - item_value = PyList_GET_ITEM(value, i); - } else { - item_value = PyTuple_GET_ITEM(value, i); - } - if (!item_value) { - return 0; - } - if (!write_element_to_buffer(self, buffer, list_type_byte, - item_value, check_keys, options, - 0, 0)) { - return 0; - } - } - - /* write null byte and fill in length */ - if (!buffer_write_bytes(buffer, &zero, 1)) { - return 0; - } - length = pymongo_buffer_get_position(buffer) - start_position; - buffer_write_int32_at_position( - buffer, length_location, (int32_t)length); - return 1; - /* Python3 special case. Store bytes as BSON binary subtype 0. 
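The 4- versus 8-byte integer split above is visible in the encoded bytes; offset 4 holds the first element's type byte (the "n" key is illustrative):

from bson import encode

assert encode({"n": 1})[4] == 0x10        # fits in an int32
assert encode({"n": 2 ** 40})[4] == 0x12  # promoted to int64
# anything outside 8 bytes raises OverflowError, per the branch above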
*/ - } else if (PyBytes_Check(value)) { - char subtype = 0; - int size; - const char* data = PyBytes_AS_STRING(value); - if (!data) - return 0; - if ((size = _downcast_and_check(PyBytes_GET_SIZE(value), 0)) == -1) - return 0; - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x05; - if (!buffer_write_int32(buffer, (int32_t)size)) { - return 0; - } - if (!buffer_write_bytes(buffer, &subtype, 1)) { - return 0; - } - if (!buffer_write_bytes(buffer, data, size)) { - return 0; - } - return 1; - } else if (PyUnicode_Check(value)) { - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x02; - return write_unicode(buffer, value); - } else if (PyDateTime_Check(value)) { - long long millis; - PyObject* utcoffset = PyObject_CallMethodObjArgs(value, state->_utcoffset_str , NULL); - if (utcoffset == NULL) - return 0; - if (utcoffset != Py_None) { - PyObject* result = PyNumber_Subtract(value, utcoffset); - if (!result) { - Py_DECREF(utcoffset); - return 0; - } - millis = millis_from_datetime(result); - Py_DECREF(result); - } else { - millis = millis_from_datetime(value); - } - Py_DECREF(utcoffset); - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x09; - return buffer_write_int64(buffer, (int64_t)millis); - } else if (PyObject_TypeCheck(value, state->REType)) { - return _write_regex_to_buffer(buffer, type_byte, value, state->_flags_str, state->_pattern_str); - } else if (PyObject_IsInstance(value, state->Mapping)) { - /* PyObject_IsInstance returns -1 on error */ - if (PyErr_Occurred()) { - return 0; - } - *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; - return write_dict(self, buffer, value, check_keys, options, 0); - } else if (PyObject_IsInstance(value, state->UUID)) { - PyObject* binary_value = NULL; - PyObject *uuid_rep_obj = NULL; - int result; - - /* PyObject_IsInstance returns -1 on error */ - if (PyErr_Occurred()) { - return 0; - } - - if (!(uuid_rep_obj = PyLong_FromLong(options->uuid_rep))) { - return 0; - } - binary_value = PyObject_CallMethodObjArgs(state->Binary, state->_from_uuid_str, value, uuid_rep_obj, NULL); - Py_DECREF(uuid_rep_obj); - - if (binary_value == NULL) { - return 0; - } - - result = _write_element_to_buffer(self, buffer, - type_byte, binary_value, - check_keys, options, - in_custom_call, - in_fallback_call); - Py_DECREF(binary_value); - return result; - } - - /* Try a custom encoder if one is provided and we have not already - * attempted to use a type encoder. */ - if (!in_custom_call && !options->type_registry.is_encoder_empty) { - PyObject* value_type = NULL; - PyObject* converter = NULL; - value_type = PyObject_Type(value); - if (value_type == NULL) { - return 0; - } - converter = PyDict_GetItem(options->type_registry.encoder_map, value_type); - Py_XDECREF(value_type); - if (converter != NULL) { - /* Transform types that have a registered converter. - * A new reference is created upon transformation. */ - new_value = PyObject_CallFunctionObjArgs(converter, value, NULL); - if (new_value == NULL) { - return 0; - } - retval = write_element_to_buffer(self, buffer, type_byte, new_value, - check_keys, options, 1, 0); - Py_XDECREF(new_value); - return retval; - } - } - - /* Try the fallback encoder if one is provided and we have not already - * attempted to use the fallback encoder. 
*/ - if (!in_fallback_call && options->type_registry.has_fallback_encoder) { - new_value = PyObject_CallFunctionObjArgs( - options->type_registry.fallback_encoder, value, NULL); - if (new_value == NULL) { - // propagate any exception raised by the callback - return 0; - } - retval = write_element_to_buffer(self, buffer, type_byte, new_value, - check_keys, options, 0, 1); - Py_XDECREF(new_value); - return retval; - } - - /* We can't determine value's type. Fail. */ - _set_cannot_encode(value); - return 0; -} - -static int check_key_name(const char* name, int name_length) { - - if (name_length > 0 && name[0] == '$') { - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument) { - PyObject* errmsg = PyUnicode_FromFormat( - "key '%s' must not start with '$'", name); - if (errmsg) { - PyErr_SetObject(InvalidDocument, errmsg); - Py_DECREF(errmsg); - } - Py_DECREF(InvalidDocument); - } - return 0; - } - if (strchr(name, '.')) { - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument) { - PyObject* errmsg = PyUnicode_FromFormat( - "key '%s' must not contain '.'", name); - if (errmsg) { - PyErr_SetObject(InvalidDocument, errmsg); - Py_DECREF(errmsg); - } - Py_DECREF(InvalidDocument); - } - return 0; - } - return 1; -} - -/* Write a (key, value) pair to the buffer. - * - * Returns 0 on failure */ -int write_pair(PyObject* self, buffer_t buffer, const char* name, int name_length, - PyObject* value, unsigned char check_keys, - const codec_options_t* options, unsigned char allow_id) { - int type_byte; - - /* Don't write any _id elements unless we're explicitly told to - - * _id has to be written first so we do so, but don't bother - * deleting it from the dictionary being written. */ - if (!allow_id && strcmp(name, "_id") == 0) { - return 1; - } - - type_byte = pymongo_buffer_save_space(buffer, 1); - if (type_byte == -1) { - return 0; - } - if (check_keys && !check_key_name(name, name_length)) { - return 0; - } - if (!buffer_write_bytes(buffer, name, name_length + 1)) { - return 0; - } - if (!write_element_to_buffer(self, buffer, type_byte, - value, check_keys, options, 0, 0)) { - return 0; - } - return 1; -} - -int decode_and_write_pair(PyObject* self, buffer_t buffer, - PyObject* key, PyObject* value, - unsigned char check_keys, - const codec_options_t* options, - unsigned char top_level) { - PyObject* encoded; - const char* data; - int size; - if (PyUnicode_Check(key)) { - encoded = PyUnicode_AsUTF8String(key); - if (!encoded) { - return 0; - } - if (!(data = PyBytes_AS_STRING(encoded))) { - Py_DECREF(encoded); - return 0; - } - if ((size = _downcast_and_check(PyBytes_GET_SIZE(encoded), 1)) == -1) { - Py_DECREF(encoded); - return 0; - } - if (strlen(data) != (size_t)(size - 1)) { - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument) { - PyErr_SetString(InvalidDocument, - "Key names must not contain the NULL byte"); - Py_DECREF(InvalidDocument); - } - Py_DECREF(encoded); - return 0; - } - } else { - PyObject* InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument) { - PyObject* repr = PyObject_Repr(key); - if (repr) { - PyObject* errmsg = PyUnicode_FromString( - "documents must have only string keys, key was "); - if (errmsg) { - PyObject* error = PyUnicode_Concat(errmsg, repr); - if (error) { - PyErr_SetObject(InvalidDocument, error); - Py_DECREF(error); - } - Py_DECREF(errmsg); - Py_DECREF(repr); - } else { - Py_DECREF(repr); - } - } - Py_DECREF(InvalidDocument); - } - return 0; - } - - /* If top_level is True, don't 
allow writing _id here - it was already written. */ - if (!write_pair(self, buffer, data, - size - 1, value, check_keys, options, !top_level)) { - Py_DECREF(encoded); - return 0; - } - - Py_DECREF(encoded); - return 1; -} - - -/* Write a RawBSONDocument to the buffer. - * Returns the number of bytes written or 0 on failure. - */ -static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw_str) { - char* bytes; - Py_ssize_t len; - int len_int; - int bytes_written = 0; - PyObject* bytes_obj = NULL; - - bytes_obj = PyObject_GetAttr(raw, _raw_str); - if (!bytes_obj) { - goto fail; - } - - if (-1 == PyBytes_AsStringAndSize(bytes_obj, &bytes, &len)) { - goto fail; - } - len_int = _downcast_and_check(len, 0); - if (-1 == len_int) { - goto fail; - } - if (!buffer_write_bytes(buffer, bytes, len_int)) { - goto fail; - } - bytes_written = len_int; -fail: - Py_XDECREF(bytes_obj); - return bytes_written; -} - - -/* Update Invalid Document error message to include doc. - */ -void handle_invalid_doc_error(PyObject* dict) { - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - PyObject *msg = NULL, *dict_str = NULL, *new_msg = NULL; - PyErr_Fetch(&etype, &evalue, &etrace); - PyObject *InvalidDocument = _error("InvalidDocument"); - if (InvalidDocument == NULL) { - goto cleanup; - } - - if (evalue && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { - /* Assign to the outer locals (no shadowing declarations) so the - * cleanup block below can actually release these references. */ - msg = PyObject_Str(evalue); - if (msg) { - // Prepend doc to the existing message - dict_str = PyObject_Str(dict); - if (dict_str == NULL) { - goto cleanup; - } - const char * dict_str_utf8 = PyUnicode_AsUTF8(dict_str); - if (dict_str_utf8 == NULL) { - goto cleanup; - } - const char * msg_utf8 = PyUnicode_AsUTF8(msg); - if (msg_utf8 == NULL) { - goto cleanup; - } - new_msg = PyUnicode_FromFormat("Invalid document %s | %s", dict_str_utf8, msg_utf8); - Py_DECREF(evalue); - Py_DECREF(etype); - etype = InvalidDocument; - InvalidDocument = NULL; - if (new_msg) { - /* Ownership moves to evalue; NULL the local so cleanup - * does not release the exception value we are restoring. */ - evalue = new_msg; - new_msg = NULL; - } else { - evalue = msg; - msg = NULL; - } - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } -cleanup: - PyErr_Restore(etype, evalue, etrace); - Py_XDECREF(msg); - Py_XDECREF(InvalidDocument); - Py_XDECREF(dict_str); - Py_XDECREF(new_msg); -} - - -/* returns the number of bytes written or 0 on failure */ -int write_dict(PyObject* self, buffer_t buffer, - PyObject* dict, unsigned char check_keys, - const codec_options_t* options, unsigned char top_level) { - PyObject* key; - PyObject* iter; - char zero = 0; - int length; - int length_location; - struct module_state *state = GETSTATE(self); - long type_marker; - int is_dict = PyDict_Check(dict); - if (!state) { - return 0; - } - - if (!is_dict) { - /* check for RawBSONDocument */ - type_marker = _type_marker(dict, state->_type_marker_str); - if (type_marker < 0) { - return 0; - } - - if (101 == type_marker) { - return write_raw_doc(buffer, dict, state->_raw_str); - } - - if (!PyObject_IsInstance(dict, state->Mapping)) { - PyObject* repr; - if ((repr = PyObject_Repr(dict))) { - PyObject* errmsg = PyUnicode_FromString( - "encoder expected a mapping type but got: "); - if (errmsg) { - PyObject* error = PyUnicode_Concat(errmsg, repr); - if (error) { - PyErr_SetObject(PyExc_TypeError, error); - Py_DECREF(error); - } - Py_DECREF(errmsg); - Py_DECREF(repr); - } - else { - Py_DECREF(repr); - } - } else { - PyErr_SetString(PyExc_TypeError, - "encoder expected a mapping type"); - } - - return 0; - } - /* PyObject_IsInstance returns -1 on error */ - if (PyErr_Occurred()) { - return 0; - } - } - -
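The key rules enforced by check_key_name and decode_and_write_pair above, and the fallback-encoder path, are easy to observe from the Python side. A minimal sketch, assuming the pymongo 4.x `bson` package these sources ship with is importable; Vec2 and fall_back are hypothetical names used only for illustration:

    import bson
    from bson.codec_options import CodecOptions, TypeRegistry
    from bson.errors import InvalidDocument

    # check_key_name: with check_keys=True, keys must not start with '$'
    # or contain '.'; keys must always be str, with no embedded NUL byte.
    for bad in ({"$set": 1}, {"a.b": 1}, {b"not-str": 1}):
        try:
            bson.encode(bad, check_keys=True)
        except InvalidDocument as exc:
            print(exc)

    # The fallback encoder is consulted last, for otherwise unknown types.
    class Vec2:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def fall_back(value):
        if isinstance(value, Vec2):
            return {"x": value.x, "y": value.y}
        raise TypeError(f"cannot encode {value!r}")

    opts = CodecOptions(type_registry=TypeRegistry(fallback_encoder=fall_back))
    data = bson.encode({"v": Vec2(1, 2)}, codec_options=opts)
    assert bson.decode(data)["v"] == {"x": 1, "y": 2}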
length_location = pymongo_buffer_save_space(buffer, 4); - if (length_location == -1) { - return 0; - } - - /* Write _id first if this is a top level doc. */ - if (top_level) { - /* - * If "dict" is a defaultdict we don't want to call - * PyObject_GetItem on it. That would **create** - * an _id where one didn't previously exist (PYTHON-871). - */ - if (is_dict) { - /* PyDict_GetItem returns a borrowed reference. */ - PyObject* _id = PyDict_GetItem(dict, state->_id_str); - if (_id) { - if (!write_pair(self, buffer, "_id", 3, - _id, check_keys, options, 1)) { - return 0; - } - } - } else if (PyMapping_HasKey(dict, state->_id_str)) { - PyObject* _id = PyObject_GetItem(dict, state->_id_str); - if (!_id) { - return 0; - } - if (!write_pair(self, buffer, "_id", 3, - _id, check_keys, options, 1)) { - Py_DECREF(_id); - return 0; - } - /* PyObject_GetItem returns a new reference. */ - Py_DECREF(_id); - } - } - - if (is_dict) { - PyObject* value; - Py_ssize_t pos = 0; - while (PyDict_Next(dict, &pos, &key, &value)) { - if (!decode_and_write_pair(self, buffer, key, value, - check_keys, options, top_level)) { - if (PyErr_Occurred() && top_level) { - handle_invalid_doc_error(dict); - } - return 0; - } - } - } else { - iter = PyObject_GetIter(dict); - if (iter == NULL) { - return 0; - } - while ((key = PyIter_Next(iter)) != NULL) { - PyObject* value = PyObject_GetItem(dict, key); - if (!value) { - PyErr_SetObject(PyExc_KeyError, key); - Py_DECREF(key); - Py_DECREF(iter); - return 0; - } - if (!decode_and_write_pair(self, buffer, key, value, - check_keys, options, top_level)) { - if (PyErr_Occurred() && top_level) { - handle_invalid_doc_error(dict); - } - Py_DECREF(key); - Py_DECREF(value); - Py_DECREF(iter); - return 0; - } - Py_DECREF(key); - Py_DECREF(value); - } - Py_DECREF(iter); - if (PyErr_Occurred()) { - return 0; - } - } - - /* write null byte and fill in length */ - if (!buffer_write_bytes(buffer, &zero, 1)) { - return 0; - } - length = pymongo_buffer_get_position(buffer) - length_location; - buffer_write_int32_at_position( - buffer, length_location, (int32_t)length); - return length; -} - -static PyObject* _cbson_dict_to_bson(PyObject* self, PyObject* args) { - PyObject* dict; - PyObject* result; - unsigned char check_keys; - unsigned char top_level = 1; - PyObject* options_obj = NULL; - codec_options_t options; - buffer_t buffer; - PyObject* raw_bson_document_bytes_obj; - long type_marker; - struct module_state *state = GETSTATE(self); - if (!state) { - return NULL; - } - - if (!(PyArg_ParseTuple(args, "ObO|b", &dict, &check_keys, - &options_obj, &top_level) && - convert_codec_options(self, options_obj, &options))) { - return NULL; - } - - /* check for RawBSONDocument */ - type_marker = _type_marker(dict, state->_type_marker_str); - if (type_marker < 0) { - destroy_codec_options(&options); - return NULL; - } else if (101 == type_marker) { - destroy_codec_options(&options); - raw_bson_document_bytes_obj = PyObject_GetAttr(dict, state->_raw_str); - if (NULL == raw_bson_document_bytes_obj) { - return NULL; - } - return raw_bson_document_bytes_obj; - } - - buffer = pymongo_buffer_new(); - if (!buffer) { - destroy_codec_options(&options); - return NULL; - } - - if (!write_dict(self, buffer, dict, check_keys, &options, top_level)) { - destroy_codec_options(&options); - pymongo_buffer_free(buffer); - return NULL; - } - - /* objectify buffer */ - result = Py_BuildValue("y#", pymongo_buffer_get_buffer(buffer), - (Py_ssize_t)pymongo_buffer_get_position(buffer)); - destroy_codec_options(&options); - 
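Two behaviors of write_dict and _cbson_dict_to_bson above can be confirmed from Python; a short sketch, again assuming pymongo 4.x:

    import bson
    from bson.raw_bson import RawBSONDocument

    # A top-level _id is hoisted to the front of the document.
    data = bson.encode({"b": 2, "_id": 1})
    assert list(bson.decode(data)) == ["_id", "b"]

    # A RawBSONDocument (type marker 101) short-circuits the encoder:
    # its raw bytes are passed through unchanged.
    assert bson.encode(RawBSONDocument(data)) == data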
pymongo_buffer_free(buffer); - return result; -} - -/* - * Hook for optional decoding BSON documents to DBRef. - */ -static PyObject *_dbref_hook(PyObject* self, PyObject* value) { - struct module_state *state = GETSTATE(self); - PyObject* ref = NULL; - PyObject* id = NULL; - PyObject* database = NULL; - PyObject* ret = NULL; - int db_present = 0; - if (!state) { - return NULL; - } - - /* Decoding for DBRefs */ - if (PyMapping_HasKey(value, state->_dollar_ref_str) && PyMapping_HasKey(value, state->_dollar_id_str)) { /* DBRef */ - ref = PyObject_GetItem(value, state->_dollar_ref_str); - /* PyObject_GetItem returns NULL to indicate error. */ - if (!ref) { - goto invalid; - } - id = PyObject_GetItem(value, state->_dollar_id_str); - /* PyObject_GetItem returns NULL to indicate error. */ - if (!id) { - goto invalid; - } - - if (PyMapping_HasKey(value, state->_dollar_db_str)) { - database = PyObject_GetItem(value, state->_dollar_db_str); - if (!database) { - goto invalid; - } - db_present = 1; - } else { - database = Py_None; - Py_INCREF(database); - } - - // check types - if (!(PyUnicode_Check(ref) && (database == Py_None || PyUnicode_Check(database)))) { - ret = value; - goto invalid; - } - - PyMapping_DelItem(value, state->_dollar_ref_str); - PyMapping_DelItem(value, state->_dollar_id_str); - if (db_present) { - PyMapping_DelItem(value, state->_dollar_db_str); - } - - ret = PyObject_CallFunctionObjArgs(state->DBRef, ref, id, database, value, NULL); - Py_DECREF(value); - } else { - ret = value; - } -invalid: - Py_XDECREF(ref); - Py_XDECREF(id); - Py_XDECREF(database); - return ret; -} - -static PyObject* get_value(PyObject* self, PyObject* name, const char* buffer, - unsigned* position, unsigned char type, - unsigned max, const codec_options_t* options, int raw_array) { - struct module_state *state = GETSTATE(self); - PyObject* value = NULL; - if (!state) { - return NULL; - } - switch (type) { - case 1: - { - double d; - if (max < 8) { - goto invalid; - } - memcpy(&d, buffer + *position, 8); - value = PyFloat_FromDouble(BSON_DOUBLE_FROM_LE(d)); - *position += 8; - break; - } - case 2: - case 14: - { - uint32_t value_length; - if (max < 4) { - goto invalid; - } - memcpy(&value_length, buffer + *position, 4); - value_length = BSON_UINT32_FROM_LE(value_length); - /* Encoded string length + string */ - if (!value_length || max < value_length || max < 4 + value_length) { - goto invalid; - } - *position += 4; - /* Strings must end in \0 */ - if (buffer[*position + value_length - 1]) { - goto invalid; - } - value = PyUnicode_DecodeUTF8( - buffer + *position, value_length - 1, - options->unicode_decode_error_handler); - if (!value) { - goto invalid; - } - *position += value_length; - break; - } - case 3: - { - uint32_t size; - - if (max < 4) { - goto invalid; - } - memcpy(&size, buffer + *position, 4); - size = BSON_UINT32_FROM_LE(size); - if (size < BSON_MIN_SIZE || max < size) { - goto invalid; - } - /* Check for bad eoo */ - if (buffer[*position + size - 1]) { - goto invalid; - } - - value = elements_to_dict(self, buffer + *position, - size, options); - if (!value) { - goto invalid; - } - - if (options->is_raw_bson) { - *position += size; - break; - } - - /* Hook for DBRefs */ - value = _dbref_hook(self, value); - if (!value) { - goto invalid; - } - - *position += size; - break; - } - case 4: - { - uint32_t size, end; - - if (max < 4) { - goto invalid; - } - memcpy(&size, buffer + *position, 4); - size = BSON_UINT32_FROM_LE(size); - if (size < BSON_MIN_SIZE || max < size) { - goto invalid; - } - - 
end = *position + size - 1; - /* Check for bad eoo */ - if (buffer[end]) { - goto invalid; - } - - if (raw_array != 0) { - // Treat it as a binary buffer. - value = PyBytes_FromStringAndSize(buffer + *position, size); - *position += size; - break; - } - - *position += 4; - - value = PyList_New(0); - if (!value) { - goto invalid; - } - while (*position < end) { - PyObject* to_append; - - unsigned char bson_type = (unsigned char)buffer[(*position)++]; - - size_t key_size = strlen(buffer + *position); - if (max < key_size) { - Py_DECREF(value); - goto invalid; - } - /* just skip the key, they're in order. */ - *position += (unsigned)key_size + 1; - if (Py_EnterRecursiveCall(" while decoding a list value")) { - Py_DECREF(value); - goto invalid; - } - to_append = get_value(self, name, buffer, position, bson_type, - max - (unsigned)key_size, options, raw_array); - Py_LeaveRecursiveCall(); - if (!to_append) { - Py_DECREF(value); - goto invalid; - } - if (PyList_Append(value, to_append) < 0) { - Py_DECREF(value); - Py_DECREF(to_append); - goto invalid; - } - Py_DECREF(to_append); - } - if (*position != end) { - goto invalid; - } - (*position)++; - break; - } - case 5: - { - PyObject* data; - PyObject* st; - uint32_t length, length2; - unsigned char subtype; - - if (max < 5) { - goto invalid; - } - memcpy(&length, buffer + *position, 4); - length = BSON_UINT32_FROM_LE(length); - if (max < length) { - goto invalid; - } - - subtype = (unsigned char)buffer[*position + 4]; - *position += 5; - if (subtype == 2) { - if (length < 4) { - goto invalid; - } - memcpy(&length2, buffer + *position, 4); - length2 = BSON_UINT32_FROM_LE(length2); - if (length2 != length - 4) { - goto invalid; - } - } - /* Python3 special case. Decode BSON binary subtype 0 to bytes. */ - if (subtype == 0) { - value = PyBytes_FromStringAndSize(buffer + *position, length); - *position += length; - break; - } - if (subtype == 2) { - data = PyBytes_FromStringAndSize(buffer + *position + 4, length - 4); - } else { - data = PyBytes_FromStringAndSize(buffer + *position, length); - } - if (!data) { - goto invalid; - } - /* Encode as UUID or Binary based on options->uuid_rep */ - if (subtype == 3 || subtype == 4) { - PyObject* binary_value = NULL; - char uuid_rep = options->uuid_rep; - - /* UUID should always be 16 bytes */ - if (length != 16) { - goto uuiderror; - } - - binary_value = PyObject_CallFunction(state->Binary, "(Oi)", data, subtype); - if (binary_value == NULL) { - goto uuiderror; - } - - if ((uuid_rep == UNSPECIFIED) || - (subtype == 4 && uuid_rep != STANDARD) || - (subtype == 3 && uuid_rep == STANDARD)) { - value = binary_value; - Py_INCREF(value); - } else { - PyObject *uuid_rep_obj = PyLong_FromLong(uuid_rep); - if (!uuid_rep_obj) { - goto uuiderror; - } - value = PyObject_CallMethodObjArgs(binary_value, state->_as_uuid_str, uuid_rep_obj, NULL); - Py_DECREF(uuid_rep_obj); - } - - uuiderror: - Py_XDECREF(binary_value); - Py_DECREF(data); - if (!value) { - goto invalid; - } - *position += length; - break; - } - - st = PyLong_FromLong(subtype); - if (!st) { - Py_DECREF(data); - goto invalid; - } - value = PyObject_CallFunctionObjArgs(state->Binary, data, st, NULL); - Py_DECREF(st); - Py_DECREF(data); - if (!value) { - goto invalid; - } - *position += length; - break; - } - case 6: - case 10: - { - value = Py_None; - Py_INCREF(value); - break; - } - case 7: - { - if (max < 12) { - goto invalid; - } - value = PyObject_CallFunction(state->ObjectId, "y#", buffer + *position, (Py_ssize_t)12); - *position += 12; - break; - } - case 
8: - { - char boolean_raw = buffer[(*position)++]; - if (0 == boolean_raw) { - value = Py_False; - } else if (1 == boolean_raw) { - value = Py_True; - } else { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_Format(InvalidBSON, "invalid boolean value: %x", boolean_raw); - Py_DECREF(InvalidBSON); - } - return NULL; - } - Py_INCREF(value); - break; - } - case 9: - { - int64_t millis; - if (max < 8) { - goto invalid; - } - memcpy(&millis, buffer + *position, 8); - millis = (int64_t)BSON_UINT64_FROM_LE(millis); - *position += 8; - - value = decode_datetime(self, millis, options); - break; - } - case 11: - { - PyObject* pattern; - int flags; - size_t flags_length, i; - size_t pattern_length = strlen(buffer + *position); - if (pattern_length > BSON_MAX_SIZE || max < pattern_length) { - goto invalid; - } - pattern = PyUnicode_DecodeUTF8( - buffer + *position, pattern_length, - options->unicode_decode_error_handler); - if (!pattern) { - goto invalid; - } - *position += (unsigned)pattern_length + 1; - flags_length = strlen(buffer + *position); - if (flags_length > BSON_MAX_SIZE || - (BSON_MAX_SIZE - pattern_length) < flags_length) { - Py_DECREF(pattern); - goto invalid; - } - if (max < pattern_length + flags_length) { - Py_DECREF(pattern); - goto invalid; - } - flags = 0; - for (i = 0; i < flags_length; i++) { - if (buffer[*position + i] == 'i') { - flags |= 2; - } else if (buffer[*position + i] == 'l') { - flags |= 4; - } else if (buffer[*position + i] == 'm') { - flags |= 8; - } else if (buffer[*position + i] == 's') { - flags |= 16; - } else if (buffer[*position + i] == 'u') { - flags |= 32; - } else if (buffer[*position + i] == 'x') { - flags |= 64; - } - } - *position += (unsigned)flags_length + 1; - - value = PyObject_CallFunction(state->Regex, "Oi", pattern, flags); - Py_DECREF(pattern); - break; - } - case 12: - { - uint32_t coll_length; - PyObject* collection; - PyObject* id = NULL; - - if (max < 4) { - goto invalid; - } - memcpy(&coll_length, buffer + *position, 4); - coll_length = BSON_UINT32_FROM_LE(coll_length); - /* Encoded string length + string + 12 byte ObjectId */ - if (!coll_length || max < coll_length || max < 4 + coll_length + 12) { - goto invalid; - } - *position += 4; - /* Strings must end in \0 */ - if (buffer[*position + coll_length - 1]) { - goto invalid; - } - - collection = PyUnicode_DecodeUTF8( - buffer + *position, coll_length - 1, - options->unicode_decode_error_handler); - if (!collection) { - goto invalid; - } - *position += coll_length; - - id = PyObject_CallFunction(state->ObjectId, "y#", buffer + *position, (Py_ssize_t)12); - if (!id) { - Py_DECREF(collection); - goto invalid; - } - *position += 12; - value = PyObject_CallFunctionObjArgs(state->DBRef, collection, id, NULL); - Py_DECREF(collection); - Py_DECREF(id); - break; - } - case 13: - { - PyObject* code; - uint32_t value_length; - if (max < 4) { - goto invalid; - } - memcpy(&value_length, buffer + *position, 4); - value_length = BSON_UINT32_FROM_LE(value_length); - /* Encoded string length + string */ - if (!value_length || max < value_length || max < 4 + value_length) { - goto invalid; - } - *position += 4; - /* Strings must end in \0 */ - if (buffer[*position + value_length - 1]) { - goto invalid; - } - code = PyUnicode_DecodeUTF8( - buffer + *position, value_length - 1, - options->unicode_decode_error_handler); - if (!code) { - goto invalid; - } - *position += value_length; - value = PyObject_CallFunctionObjArgs(state->Code, code, NULL, NULL); - Py_DECREF(code); - break; - } 
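The uuid_rep gating in case 5 above mirrors the package's Python-level API. A sketch assuming pymongo 4.x, where the default uuid_representation is UNSPECIFIED, so subtype-4 values decode as Binary rather than uuid.UUID:

    import uuid
    import bson
    from bson.binary import Binary, UuidRepresentation
    from bson.codec_options import CodecOptions

    u = uuid.uuid4()
    # Subtype 4: standard RFC 4122 byte order.
    data = bson.encode({"u": Binary.from_uuid(u, UuidRepresentation.STANDARD)})

    # With STANDARD, subtype 4 round-trips to uuid.UUID ...
    opts = CodecOptions(uuid_representation=UuidRepresentation.STANDARD)
    assert bson.decode(data, opts)["u"] == u
    # ... while the default leaves it as Binary, as in the switch above.
    assert isinstance(bson.decode(data)["u"], Binary)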
- case 15: - { - uint32_t c_w_s_size; - uint32_t code_size; - uint32_t scope_size; - uint32_t len; - PyObject* code; - PyObject* scope; - - if (max < 8) { - goto invalid; - } - - memcpy(&c_w_s_size, buffer + *position, 4); - c_w_s_size = BSON_UINT32_FROM_LE(c_w_s_size); - *position += 4; - - if (max < c_w_s_size) { - goto invalid; - } - - memcpy(&code_size, buffer + *position, 4); - code_size = BSON_UINT32_FROM_LE(code_size); - /* code_w_scope length + code length + code + scope length */ - len = 4 + 4 + code_size + 4; - if (!code_size || max < code_size || max < len || len < code_size) { - goto invalid; - } - *position += 4; - /* Strings must end in \0 */ - if (buffer[*position + code_size - 1]) { - goto invalid; - } - code = PyUnicode_DecodeUTF8( - buffer + *position, code_size - 1, - options->unicode_decode_error_handler); - if (!code) { - goto invalid; - } - *position += code_size; - - memcpy(&scope_size, buffer + *position, 4); - scope_size = BSON_UINT32_FROM_LE(scope_size); - /* code length + code + scope length + scope */ - len = 4 + 4 + code_size + scope_size; - if (scope_size < BSON_MIN_SIZE || len != c_w_s_size || len < scope_size) { - Py_DECREF(code); - goto invalid; - } - - /* Check for bad eoo */ - if (buffer[*position + scope_size - 1]) { - goto invalid; - } - scope = elements_to_dict(self, buffer + *position, - scope_size, options); - if (!scope) { - Py_DECREF(code); - goto invalid; - } - *position += scope_size; - - value = PyObject_CallFunctionObjArgs(state->Code, code, scope, NULL); - Py_DECREF(code); - Py_DECREF(scope); - break; - } - case 16: - { - int32_t i; - if (max < 4) { - goto invalid; - } - memcpy(&i, buffer + *position, 4); - i = (int32_t)BSON_UINT32_FROM_LE(i); - value = PyLong_FromLong(i); - if (!value) { - goto invalid; - } - *position += 4; - break; - } - case 17: - { - uint32_t time, inc; - if (max < 8) { - goto invalid; - } - memcpy(&inc, buffer + *position, 4); - memcpy(&time, buffer + *position + 4, 4); - inc = BSON_UINT32_FROM_LE(inc); - time = BSON_UINT32_FROM_LE(time); - value = PyObject_CallFunction(state->Timestamp, "II", time, inc); - *position += 8; - break; - } - case 18: - { - int64_t ll; - if (max < 8) { - goto invalid; - } - memcpy(&ll, buffer + *position, 8); - ll = (int64_t)BSON_UINT64_FROM_LE(ll); - value = PyObject_CallFunction(state->BSONInt64, "L", ll); - *position += 8; - break; - } - case 19: - { - if (max < 16) { - goto invalid; - } - PyObject *_bytes_obj = PyBytes_FromStringAndSize(buffer + *position, (Py_ssize_t)16); - if (!_bytes_obj) { - goto invalid; - } - value = PyObject_CallMethodObjArgs(state->Decimal128, state->_from_bid_str, _bytes_obj, NULL); - Py_DECREF(_bytes_obj); - *position += 16; - break; - } - case 255: - { - value = PyObject_CallFunctionObjArgs(state->MinKey, NULL); - break; - } - case 127: - { - value = PyObject_CallFunctionObjArgs(state->MaxKey, NULL); - break; - } - default: - { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyObject* bobj = PyBytes_FromFormat("%c", type); - if (bobj) { - PyObject* repr = PyObject_Repr(bobj); - Py_DECREF(bobj); - /* - * See http://bugs.python.org/issue22023 for why we can't - * just use PyUnicode_FromFormat with %S or %R to do this - * work. - */ - if (repr) { - PyObject* left = PyUnicode_FromString( - "Detected unknown BSON type "); - if (left) { - PyObject* lmsg = PyUnicode_Concat(left, repr); - Py_DECREF(left); - if (lmsg) { - PyObject* errmsg = PyUnicode_FromFormat( - "%U for fieldname '%U'. 
Are you using the " - "latest driver version?", lmsg, name); - if (errmsg) { - PyErr_SetObject(InvalidBSON, errmsg); - Py_DECREF(errmsg); - } - Py_DECREF(lmsg); - } - } - Py_DECREF(repr); - } - } - Py_DECREF(InvalidBSON); - } - goto invalid; - } - } - - if (value) { - if (!options->type_registry.is_decoder_empty) { - PyObject* value_type = NULL; - PyObject* converter = NULL; - value_type = PyObject_Type(value); - if (value_type == NULL) { - goto invalid; - } - converter = PyDict_GetItem(options->type_registry.decoder_map, value_type); - if (converter != NULL) { - PyObject* new_value = PyObject_CallFunctionObjArgs(converter, value, NULL); - Py_DECREF(value_type); - Py_DECREF(value); - return new_value; - } else { - Py_DECREF(value_type); - return value; - } - } - return value; - } - - invalid: - - /* - * Wrap any non-InvalidBSON errors in InvalidBSON. - */ - if (PyErr_Occurred()) { - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - PyObject *InvalidBSON = NULL; - - /* - * Calling _error clears the error state, so fetch it first. - */ - PyErr_Fetch(&etype, &evalue, &etrace); - - /* Don't reraise anything but PyExc_Exception subclasses as InvalidBSON. */ - if (PyErr_GivenExceptionMatches(etype, PyExc_Exception)) { - InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - if (!PyErr_GivenExceptionMatches(etype, InvalidBSON)) { - /* - * Raise InvalidBSON(str(e)). - */ - Py_DECREF(etype); - etype = InvalidBSON; - - if (evalue) { - PyObject *msg = PyObject_Str(evalue); - Py_DECREF(evalue); - evalue = msg; - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } else { - /* - * The current exception matches InvalidBSON, so we don't - * need this reference after all. - */ - Py_DECREF(InvalidBSON); - } - } - } - /* Steals references to args. */ - PyErr_Restore(etype, evalue, etrace); - } else { - PyObject *InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "invalid length or type code"); - Py_DECREF(InvalidBSON); - } - } - return NULL; -} - -/* - * Get the next 'name' and 'value' from a document in a string, whose position - * is provided. - * - * Returns the position of the next element in the document, or -1 on error.
- */ -static int _element_to_dict(PyObject* self, const char* string, - unsigned position, unsigned max, - const codec_options_t* options, - int raw_array, - PyObject** name, PyObject** value) { - unsigned char type = (unsigned char)string[position++]; - size_t name_length = strlen(string + position); - if (name_length > BSON_MAX_SIZE || position + name_length >= max) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "field name too large"); - Py_DECREF(InvalidBSON); - } - return -1; - } - *name = PyUnicode_DecodeUTF8( - string + position, name_length, - options->unicode_decode_error_handler); - if (!*name) { - /* If NULL is returned then wrap the UnicodeDecodeError - in an InvalidBSON error */ - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - PyObject *InvalidBSON = NULL; - - PyErr_Fetch(&etype, &evalue, &etrace); - if (PyErr_GivenExceptionMatches(etype, PyExc_Exception)) { - InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - Py_DECREF(etype); - etype = InvalidBSON; - - if (evalue) { - PyObject *msg = PyObject_Str(evalue); - Py_DECREF(evalue); - evalue = msg; - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } - } - PyErr_Restore(etype, evalue, etrace); - return -1; - } - position += (unsigned)name_length + 1; - *value = get_value(self, *name, string, &position, type, - max - position, options, raw_array); - if (!*value) { - Py_DECREF(*name); - return -1; - } - return position; -} - -static PyObject* _cbson_element_to_dict(PyObject* self, PyObject* args) { - /* TODO: Support buffer protocol */ - char* string; - PyObject* bson; - PyObject* options_obj = NULL; - codec_options_t options; - unsigned position; - unsigned max; - int new_position; - int raw_array = 0; - PyObject* name; - PyObject* value; - PyObject* result_tuple; - - if (!(PyArg_ParseTuple(args, "OIIOp", &bson, &position, &max, - &options_obj, &raw_array) && - convert_codec_options(self, options_obj, &options))) { - return NULL; - } - - if (!PyBytes_Check(bson)) { - PyErr_SetString(PyExc_TypeError, "argument to _element_to_dict must be a bytes object"); - return NULL; - } - string = PyBytes_AS_STRING(bson); - - new_position = _element_to_dict(self, string, position, max, &options, raw_array, &name, &value); - if (new_position < 0) { - return NULL; - } - - result_tuple = Py_BuildValue("NNi", name, value, new_position); - if (!result_tuple) { - Py_DECREF(name); - Py_DECREF(value); - return NULL; - } - - destroy_codec_options(&options); - return result_tuple; -} - -static PyObject* _elements_to_dict(PyObject* self, const char* string, - unsigned max, - const codec_options_t* options) { - unsigned position = 0; - PyObject* dict = PyObject_CallObject(options->document_class, NULL); - if (!dict) { - return NULL; - } - int raw_array = 0; - while (position < max) { - PyObject* name = NULL; - PyObject* value = NULL; - int new_position; - - new_position = _element_to_dict( - self, string, position, max, options, raw_array, &name, &value); - if (new_position < 0) { - Py_DECREF(dict); - return NULL; - } else { - position = (unsigned)new_position; - } - - PyObject_SetItem(dict, name, value); - Py_DECREF(name); - Py_DECREF(value); - } - return dict; -} - -static PyObject* elements_to_dict(PyObject* self, const char* string, - unsigned max, - const codec_options_t* options) { - PyObject* result; - if (options->is_raw_bson) { - return PyObject_CallFunction( - options->document_class, "y#O", - string, max, options->options_obj); - } - if (Py_EnterRecursiveCall(" 
while decoding a BSON document")) - return NULL; - result = _elements_to_dict(self, string + 4, max - 5, options); - Py_LeaveRecursiveCall(); - return result; -} - -static int _get_buffer(PyObject *exporter, Py_buffer *view) { - if (PyObject_GetBuffer(exporter, view, PyBUF_SIMPLE) == -1) { - return 0; - } - if (!PyBuffer_IsContiguous(view, 'C')) { - PyErr_SetString(PyExc_ValueError, - "must be a contiguous buffer"); - goto fail; - } - if (!view->buf || view->len < 0) { - PyErr_SetString(PyExc_ValueError, "invalid buffer"); - goto fail; - } - if (view->itemsize != 1) { - PyErr_SetString(PyExc_ValueError, - "buffer data must be ascii or utf8"); - goto fail; - } - return 1; -fail: - PyBuffer_Release(view); - return 0; -} - -static PyObject* _cbson_bson_to_dict(PyObject* self, PyObject* args) { - int32_t size; - Py_ssize_t total_size; - const char* string; - PyObject* bson = NULL; - codec_options_t options; - PyObject* result = NULL; - PyObject* options_obj; - Py_buffer view = {0}; - - if (! (PyArg_ParseTuple(args, "OO", &bson, &options_obj) && - convert_codec_options(self, options_obj, &options))) { - return result; - } - - if (!_get_buffer(bson, &view)) { - destroy_codec_options(&options); - return result; - } - - total_size = view.len; - - if (total_size < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, - "not enough data for a BSON document"); - Py_DECREF(InvalidBSON); - } - goto done; - } - - string = (char*)view.buf; - memcpy(&size, string, 4); - size = (int32_t)BSON_UINT32_FROM_LE(size); - if (size < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "invalid message size"); - Py_DECREF(InvalidBSON); - } - goto done; - } - - if (total_size < size || total_size > BSON_MAX_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "objsize too large"); - Py_DECREF(InvalidBSON); - } - goto done; - } - - if (size != total_size || string[size - 1]) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "bad eoo"); - Py_DECREF(InvalidBSON); - } - goto done; - } - - result = elements_to_dict(self, string, (unsigned)size, &options); -done: - PyBuffer_Release(&view); - destroy_codec_options(&options); - return result; -} - -static PyObject* _cbson_decode_all(PyObject* self, PyObject* args) { - int32_t size; - Py_ssize_t total_size; - const char* string; - PyObject* bson; - PyObject* dict; - PyObject* result = NULL; - codec_options_t options; - PyObject* options_obj = NULL; - Py_buffer view = {0}; - - if (!(PyArg_ParseTuple(args, "OO", &bson, &options_obj) && - convert_codec_options(self, options_obj, &options))) { - return NULL; - } - - if (!_get_buffer(bson, &view)) { - destroy_codec_options(&options); - return NULL; - } - total_size = view.len; - string = (char*)view.buf; - - if (!(result = PyList_New(0))) { - goto fail; - } - - while (total_size > 0) { - if (total_size < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, - "not enough data for a BSON document"); - Py_DECREF(InvalidBSON); - } - Py_DECREF(result); - goto fail; - } - - memcpy(&size, string, 4); - size = (int32_t)BSON_UINT32_FROM_LE(size); - if (size < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "invalid message size"); - Py_DECREF(InvalidBSON); - } -
Py_DECREF(result); - goto fail; - } - - if (total_size < size) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "objsize too large"); - Py_DECREF(InvalidBSON); - } - Py_DECREF(result); - goto fail; - } - - if (string[size - 1]) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "bad eoo"); - Py_DECREF(InvalidBSON); - } - Py_DECREF(result); - goto fail; - } - - dict = elements_to_dict(self, string, (unsigned)size, &options); - if (!dict) { - Py_DECREF(result); - goto fail; - } - if (PyList_Append(result, dict) < 0) { - Py_DECREF(dict); - Py_DECREF(result); - goto fail; - } - Py_DECREF(dict); - string += size; - total_size -= size; - } - goto done; -fail: - result = NULL; -done: - PyBuffer_Release(&view); - destroy_codec_options(&options); - return result; -} - - -static PyObject* _cbson_array_of_documents_to_buffer(PyObject* self, PyObject* args) { - uint32_t size; - uint32_t value_length; - uint32_t position = 0; - buffer_t buffer; - const char* string; - PyObject* arr; - PyObject* result = NULL; - Py_buffer view = {0}; - - if (!PyArg_ParseTuple(args, "O", &arr)) { - return NULL; - } - - if (!_get_buffer(arr, &view)) { - return NULL; - } - - buffer = pymongo_buffer_new(); - if (!buffer) { - PyBuffer_Release(&view); - return NULL; - } - - string = (char*)view.buf; - - if (view.len < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, - "not enough data for a BSON document"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - memcpy(&size, string, 4); - size = BSON_UINT32_FROM_LE(size); - - /* validate the size of the array */ - if (view.len != (int32_t)size || (int32_t)size < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "objsize too large"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - if (string[size - 1]) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "bad eoo"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - /* save space for length */ - if (pymongo_buffer_save_space(buffer, size) == -1) { - goto fail; - } - pymongo_buffer_update_position(buffer, 0); - - position += 4; - while (position < size - 1) { - // Verify the value is an object. - unsigned char type = (unsigned char)string[position]; - if (type != 3) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "array element was not an object"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - // Just skip the keys. 
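The document framing that _cbson_bson_to_dict and _cbson_decode_all validate above ("invalid message size", "bad eoo") can be inspected directly; a sketch assuming pymongo 4.x:

    import struct
    import bson

    stream = bson.encode({"seq": 1}) + bson.encode({"seq": 2})

    # Each document is <int32 little-endian size> ... <0x00 terminator>;
    # the first element here is type 0x10 (int32) keyed by "seq".
    size = struct.unpack_from("<i", stream, 0)[0]
    assert stream[size - 1] == 0
    assert stream[4] == 0x10 and stream[5:9] == b"seq\x00"

    # decode_all walks the concatenated stream document by document.
    docs = bson.decode_all(stream)
    assert [d["seq"] for d in docs] == [1, 2]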
- position = position + strlen(string + position) + 1; - - if (position >= size || (size - position) < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "invalid array content"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - memcpy(&value_length, string + position, 4); - value_length = BSON_UINT32_FROM_LE(value_length); - if (value_length < BSON_MIN_SIZE) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, "invalid message size"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - if (pymongo_buffer_write(buffer, string + position, value_length) == 1) { - goto fail; - } - position += value_length; - } - - if (position != size - 1) { - PyObject* InvalidBSON = _error("InvalidBSON"); - if (InvalidBSON) { - PyErr_SetString(InvalidBSON, - "bad object or element length"); - Py_DECREF(InvalidBSON); - } - goto fail; - } - - /* objectify buffer */ - result = Py_BuildValue("y#", pymongo_buffer_get_buffer(buffer), - (Py_ssize_t)pymongo_buffer_get_position(buffer)); - goto done; -fail: - result = NULL; -done: - PyBuffer_Release(&view); - pymongo_buffer_free(buffer); - return result; -} - - -static PyMethodDef _CBSONMethods[] = { - {"_dict_to_bson", _cbson_dict_to_bson, METH_VARARGS, - "convert a dictionary to a string containing its BSON representation."}, - {"_bson_to_dict", _cbson_bson_to_dict, METH_VARARGS, - "convert a BSON string to a SON object."}, - {"_decode_all", _cbson_decode_all, METH_VARARGS, - "convert binary data to a sequence of documents."}, - {"_element_to_dict", _cbson_element_to_dict, METH_VARARGS, - "Decode a single key, value pair."}, - {"_array_of_documents_to_buffer", _cbson_array_of_documents_to_buffer, METH_VARARGS, "Convert raw array of documents to a stream of BSON documents"}, - {"_test_long_long_to_str", _test_long_long_to_str, METH_VARARGS, "Test conversion of extreme and common Py_ssize_t values to str."}, - {NULL, NULL, 0, NULL} -}; - -#define INITERROR return -1; -static int _cbson_traverse(PyObject *m, visitproc visit, void *arg) { - struct module_state *state = GETSTATE(m); - if (!state) { - return 0; - } - Py_VISIT(state->Binary); - Py_VISIT(state->Code); - Py_VISIT(state->ObjectId); - Py_VISIT(state->DBRef); - Py_VISIT(state->Regex); - Py_VISIT(state->UUID); - Py_VISIT(state->Timestamp); - Py_VISIT(state->MinKey); - Py_VISIT(state->MaxKey); - Py_VISIT(state->UTC); - Py_VISIT(state->REType); - Py_VISIT(state->_type_marker_str); - Py_VISIT(state->_flags_str); - Py_VISIT(state->_pattern_str); - Py_VISIT(state->_encoder_map_str); - Py_VISIT(state->_decoder_map_str); - Py_VISIT(state->_fallback_encoder_str); - Py_VISIT(state->_raw_str); - Py_VISIT(state->_subtype_str); - Py_VISIT(state->_binary_str); - Py_VISIT(state->_scope_str); - Py_VISIT(state->_inc_str); - Py_VISIT(state->_time_str); - Py_VISIT(state->_bid_str); - Py_VISIT(state->_replace_str); - Py_VISIT(state->_astimezone_str); - Py_VISIT(state->_id_str); - Py_VISIT(state->_dollar_ref_str); - Py_VISIT(state->_dollar_id_str); - Py_VISIT(state->_dollar_db_str); - Py_VISIT(state->_tzinfo_str); - Py_VISIT(state->_as_doc_str); - Py_VISIT(state->_utcoffset_str); - Py_VISIT(state->_from_uuid_str); - Py_VISIT(state->_as_uuid_str); - Py_VISIT(state->_from_bid_str); - Py_VISIT(state->min_datetime); - Py_VISIT(state->max_datetime); - Py_VISIT(state->replace_args); - Py_VISIT(state->replace_kwargs); - return 0; -} - -static int _cbson_clear(PyObject *m) { - struct module_state *state = GETSTATE(m); - if 
(!state) { - return 0; - } - Py_CLEAR(state->Binary); - Py_CLEAR(state->Code); - Py_CLEAR(state->ObjectId); - Py_CLEAR(state->DBRef); - Py_CLEAR(state->Regex); - Py_CLEAR(state->UUID); - Py_CLEAR(state->Timestamp); - Py_CLEAR(state->MinKey); - Py_CLEAR(state->MaxKey); - Py_CLEAR(state->UTC); - Py_CLEAR(state->REType); - Py_CLEAR(state->_type_marker_str); - Py_CLEAR(state->_flags_str); - Py_CLEAR(state->_pattern_str); - Py_CLEAR(state->_encoder_map_str); - Py_CLEAR(state->_decoder_map_str); - Py_CLEAR(state->_fallback_encoder_str); - Py_CLEAR(state->_raw_str); - Py_CLEAR(state->_subtype_str); - Py_CLEAR(state->_binary_str); - Py_CLEAR(state->_scope_str); - Py_CLEAR(state->_inc_str); - Py_CLEAR(state->_time_str); - Py_CLEAR(state->_bid_str); - Py_CLEAR(state->_replace_str); - Py_CLEAR(state->_astimezone_str); - Py_CLEAR(state->_id_str); - Py_CLEAR(state->_dollar_ref_str); - Py_CLEAR(state->_dollar_id_str); - Py_CLEAR(state->_dollar_db_str); - Py_CLEAR(state->_tzinfo_str); - Py_CLEAR(state->_as_doc_str); - Py_CLEAR(state->_utcoffset_str); - Py_CLEAR(state->_from_uuid_str); - Py_CLEAR(state->_as_uuid_str); - Py_CLEAR(state->_from_bid_str); - Py_CLEAR(state->min_datetime); - Py_CLEAR(state->max_datetime); - Py_CLEAR(state->replace_args); - Py_CLEAR(state->replace_kwargs); - return 0; -} - -/* Multi-phase extension module initialization code. - * See https://peps.python.org/pep-0489/. -*/ -static int -_cbson_exec(PyObject *m) -{ - PyObject *c_api_object; - static void *_cbson_API[_cbson_API_POINTER_COUNT]; - - PyDateTime_IMPORT; - if (PyDateTimeAPI == NULL) { - INITERROR; - } - - /* Export C API */ - _cbson_API[_cbson_buffer_write_bytes_INDEX] = (void *) buffer_write_bytes; - _cbson_API[_cbson_write_dict_INDEX] = (void *) write_dict; - _cbson_API[_cbson_write_pair_INDEX] = (void *) write_pair; - _cbson_API[_cbson_decode_and_write_pair_INDEX] = (void *) decode_and_write_pair; - _cbson_API[_cbson_convert_codec_options_INDEX] = (void *) convert_codec_options; - _cbson_API[_cbson_destroy_codec_options_INDEX] = (void *) destroy_codec_options; - _cbson_API[_cbson_buffer_write_double_INDEX] = (void *) buffer_write_double; - _cbson_API[_cbson_buffer_write_int32_INDEX] = (void *) buffer_write_int32; - _cbson_API[_cbson_buffer_write_int64_INDEX] = (void *) buffer_write_int64; - _cbson_API[_cbson_buffer_write_int32_at_position_INDEX] = - (void *) buffer_write_int32_at_position; - _cbson_API[_cbson_downcast_and_check_INDEX] = (void *) _downcast_and_check; - - c_api_object = PyCapsule_New((void *) _cbson_API, "_cbson._C_API", NULL); - if (c_api_object == NULL) - INITERROR; - - /* Import several python objects */ - if (_load_python_objects(m)) { - Py_DECREF(c_api_object); - Py_DECREF(m); - INITERROR; - } - -#if PY_VERSION_HEX >= 0x030D0000 - if (PyModule_Add(m, "_C_API", c_api_object) < 0) { - Py_DECREF(m); - INITERROR; - } -# else - if (PyModule_AddObject(m, "_C_API", c_api_object) < 0) { - Py_DECREF(c_api_object); - Py_DECREF(m); - INITERROR; - } -#endif - - return 0; -} - -static PyModuleDef_Slot _cbson_slots[] = { - {Py_mod_exec, _cbson_exec}, -#if defined(Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED) - {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED}, -#endif -#if PY_VERSION_HEX >= 0x030D0000 - {Py_mod_gil, Py_MOD_GIL_NOT_USED}, -#endif - {0, NULL}, -}; - - -static struct PyModuleDef moduledef = { - PyModuleDef_HEAD_INIT, - "_cbson", - NULL, - sizeof(struct module_state), - _CBSONMethods, - _cbson_slots, - _cbson_traverse, - _cbson_clear, - NULL -}; - -PyMODINIT_FUNC -PyInit__cbson(void) 
-{ - return PyModuleDef_Init(&moduledef); -} diff --git a/venv/lib/python3.12/site-packages/bson/_cbsonmodule.h b/venv/lib/python3.12/site-packages/bson/_cbsonmodule.h deleted file mode 100644 index 3be2b744..00000000 --- a/venv/lib/python3.12/site-packages/bson/_cbsonmodule.h +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright 2009-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include "bson-endian.h" - -#ifndef _CBSONMODULE_H -#define _CBSONMODULE_H - -#if defined(WIN32) || defined(_MSC_VER) -/* - * This macro is basically an implementation of asprintf for win32 - * We print to the provided buffer to get the string value as an int. - * USE LL2STR. This is kept only to test LL2STR. - */ -#if defined(_MSC_VER) && (_MSC_VER >= 1400) -#define INT2STRING(buffer, i) \ - _snprintf_s((buffer), \ - _scprintf("%lld", (i)) + 1, \ - _scprintf("%lld", (i)) + 1, \ - "%lld", \ - (i)) -#define STRCAT(dest, n, src) strcat_s((dest), (n), (src)) -#else -#define INT2STRING(buffer, i) \ - _snprintf((buffer), \ - _scprintf("%lld", (i)) + 1, \ - "%lld", \ - (i)) -#define STRCAT(dest, n, src) strcat((dest), (src)) -#endif -#else -#define INT2STRING(buffer, i) snprintf((buffer), sizeof((buffer)), "%lld", (i)) -#define STRCAT(dest, n, src) strcat((dest), (src)) -#endif - -/* Just enough space in char array to hold LLONG_MIN and null terminator */ -#define BUF_SIZE 21 -/* Converts integer to its string representation in decimal notation. 
*/ -extern int cbson_long_long_to_str(long long int num, char* str, size_t size); -#define LL2STR(buffer, i) cbson_long_long_to_str((i), (buffer), sizeof(buffer)) - -typedef struct type_registry_t { - PyObject* encoder_map; - PyObject* decoder_map; - PyObject* fallback_encoder; - PyObject* registry_obj; - unsigned char is_encoder_empty; - unsigned char is_decoder_empty; - unsigned char has_fallback_encoder; -} type_registry_t; - -typedef struct codec_options_t { - PyObject* document_class; - unsigned char tz_aware; - unsigned char uuid_rep; - char* unicode_decode_error_handler; - PyObject* tzinfo; - type_registry_t type_registry; - unsigned char datetime_conversion; - PyObject* options_obj; - unsigned char is_raw_bson; -} codec_options_t; - -/* C API functions */ -#define _cbson_buffer_write_bytes_INDEX 0 -#define _cbson_buffer_write_bytes_RETURN int -#define _cbson_buffer_write_bytes_PROTO (buffer_t buffer, const char* data, int size) - -#define _cbson_write_dict_INDEX 1 -#define _cbson_write_dict_RETURN int -#define _cbson_write_dict_PROTO (PyObject* self, buffer_t buffer, PyObject* dict, unsigned char check_keys, const codec_options_t* options, unsigned char top_level) - -#define _cbson_write_pair_INDEX 2 -#define _cbson_write_pair_RETURN int -#define _cbson_write_pair_PROTO (PyObject* self, buffer_t buffer, const char* name, int name_length, PyObject* value, unsigned char check_keys, const codec_options_t* options, unsigned char allow_id) - -#define _cbson_decode_and_write_pair_INDEX 3 -#define _cbson_decode_and_write_pair_RETURN int -#define _cbson_decode_and_write_pair_PROTO (PyObject* self, buffer_t buffer, PyObject* key, PyObject* value, unsigned char check_keys, const codec_options_t* options, unsigned char top_level) - -#define _cbson_convert_codec_options_INDEX 4 -#define _cbson_convert_codec_options_RETURN int -#define _cbson_convert_codec_options_PROTO (PyObject* self, PyObject* options_obj, codec_options_t* options) - -#define _cbson_destroy_codec_options_INDEX 5 -#define _cbson_destroy_codec_options_RETURN void -#define _cbson_destroy_codec_options_PROTO (codec_options_t* options) - -#define _cbson_buffer_write_double_INDEX 6 -#define _cbson_buffer_write_double_RETURN int -#define _cbson_buffer_write_double_PROTO (buffer_t buffer, double data) - -#define _cbson_buffer_write_int32_INDEX 7 -#define _cbson_buffer_write_int32_RETURN int -#define _cbson_buffer_write_int32_PROTO (buffer_t buffer, int32_t data) - -#define _cbson_buffer_write_int64_INDEX 8 -#define _cbson_buffer_write_int64_RETURN int -#define _cbson_buffer_write_int64_PROTO (buffer_t buffer, int64_t data) - -#define _cbson_buffer_write_int32_at_position_INDEX 9 -#define _cbson_buffer_write_int32_at_position_RETURN void -#define _cbson_buffer_write_int32_at_position_PROTO (buffer_t buffer, int position, int32_t data) - -#define _cbson_downcast_and_check_INDEX 10 -#define _cbson_downcast_and_check_RETURN int -#define _cbson_downcast_and_check_PROTO (Py_ssize_t size, uint8_t extra) - -/* Total number of C API pointers */ -#define _cbson_API_POINTER_COUNT 11 - -#ifdef _CBSON_MODULE -/* This section is used when compiling _cbsonmodule */ - -static _cbson_buffer_write_bytes_RETURN buffer_write_bytes _cbson_buffer_write_bytes_PROTO; - -static _cbson_write_dict_RETURN write_dict _cbson_write_dict_PROTO; - -static _cbson_write_pair_RETURN write_pair _cbson_write_pair_PROTO; - -static _cbson_decode_and_write_pair_RETURN decode_and_write_pair _cbson_decode_and_write_pair_PROTO; - -static _cbson_convert_codec_options_RETURN 
convert_codec_options _cbson_convert_codec_options_PROTO; - -static _cbson_destroy_codec_options_RETURN destroy_codec_options _cbson_destroy_codec_options_PROTO; - -static _cbson_buffer_write_double_RETURN buffer_write_double _cbson_buffer_write_double_PROTO; - -static _cbson_buffer_write_int32_RETURN buffer_write_int32 _cbson_buffer_write_int32_PROTO; - -static _cbson_buffer_write_int64_RETURN buffer_write_int64 _cbson_buffer_write_int64_PROTO; - -static _cbson_buffer_write_int32_at_position_RETURN buffer_write_int32_at_position _cbson_buffer_write_int32_at_position_PROTO; - -static _cbson_downcast_and_check_RETURN _downcast_and_check _cbson_downcast_and_check_PROTO; - -#else -/* This section is used in modules that use _cbsonmodule's API */ - -static void **_cbson_API; - -#define buffer_write_bytes (*(_cbson_buffer_write_bytes_RETURN (*)_cbson_buffer_write_bytes_PROTO) _cbson_API[_cbson_buffer_write_bytes_INDEX]) - -#define write_dict (*(_cbson_write_dict_RETURN (*)_cbson_write_dict_PROTO) _cbson_API[_cbson_write_dict_INDEX]) - -#define write_pair (*(_cbson_write_pair_RETURN (*)_cbson_write_pair_PROTO) _cbson_API[_cbson_write_pair_INDEX]) - -#define decode_and_write_pair (*(_cbson_decode_and_write_pair_RETURN (*)_cbson_decode_and_write_pair_PROTO) _cbson_API[_cbson_decode_and_write_pair_INDEX]) - -#define convert_codec_options (*(_cbson_convert_codec_options_RETURN (*)_cbson_convert_codec_options_PROTO) _cbson_API[_cbson_convert_codec_options_INDEX]) - -#define destroy_codec_options (*(_cbson_destroy_codec_options_RETURN (*)_cbson_destroy_codec_options_PROTO) _cbson_API[_cbson_destroy_codec_options_INDEX]) - -#define buffer_write_double (*(_cbson_buffer_write_double_RETURN (*)_cbson_buffer_write_double_PROTO) _cbson_API[_cbson_buffer_write_double_INDEX]) - -#define buffer_write_int32 (*(_cbson_buffer_write_int32_RETURN (*)_cbson_buffer_write_int32_PROTO) _cbson_API[_cbson_buffer_write_int32_INDEX]) - -#define buffer_write_int64 (*(_cbson_buffer_write_int64_RETURN (*)_cbson_buffer_write_int64_PROTO) _cbson_API[_cbson_buffer_write_int64_INDEX]) - -#define buffer_write_int32_at_position (*(_cbson_buffer_write_int32_at_position_RETURN (*)_cbson_buffer_write_int32_at_position_PROTO) _cbson_API[_cbson_buffer_write_int32_at_position_INDEX]) - -#define _downcast_and_check (*(_cbson_downcast_and_check_RETURN (*)_cbson_downcast_and_check_PROTO) _cbson_API[_cbson_downcast_and_check_INDEX]) - -#define _cbson_IMPORT _cbson_API = (void **)PyCapsule_Import("_cbson._C_API", 0) - -#endif - -#endif // _CBSONMODULE_H diff --git a/venv/lib/python3.12/site-packages/bson/_helpers.py b/venv/lib/python3.12/site-packages/bson/_helpers.py deleted file mode 100644 index 5a479867..00000000 --- a/venv/lib/python3.12/site-packages/bson/_helpers.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2021-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
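The module that follows supplies __getstate__/__setstate__ implementations for classes declaring __slots__. A minimal usage sketch; Point is a hypothetical class, and bson._helpers is a private module shown only to illustrate how the package's own slotted classes wire these helpers in:

    import pickle
    from bson._helpers import _getstate_slots, _setstate_slots

    class Point:
        __slots__ = ("x", "y")
        __getstate__ = _getstate_slots
        __setstate__ = _setstate_slots

        def __init__(self, x, y):
            self.x = x
            self.y = y

    # Without a __dict__, default pickling needs these hooks.
    p = pickle.loads(pickle.dumps(Point(1, 2)))
    assert (p.x, p.y) == (1, 2)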
- -"""Setstate and getstate functions for objects with __slots__, allowing -compatibility with default pickling protocol -""" -from __future__ import annotations - -from typing import Any, Mapping - - -def _setstate_slots(self: Any, state: Any) -> None: - for slot, value in state.items(): - setattr(self, slot, value) - - -def _mangle_name(name: str, prefix: str) -> str: - if name.startswith("__"): - prefix = "_" + prefix - else: - prefix = "" - return prefix + name - - -def _getstate_slots(self: Any) -> Mapping[Any, Any]: - prefix = self.__class__.__name__ - ret = {} - for name in self.__slots__: - mangled_name = _mangle_name(name, prefix) - if hasattr(self, mangled_name): - ret[mangled_name] = getattr(self, mangled_name) - return ret diff --git a/venv/lib/python3.12/site-packages/bson/binary.py b/venv/lib/python3.12/site-packages/bson/binary.py deleted file mode 100644 index 6698e55c..00000000 --- a/venv/lib/python3.12/site-packages/bson/binary.py +++ /dev/null @@ -1,551 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import annotations - -import struct -from enum import Enum -from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Type, Union, overload -from uuid import UUID - -"""Tools for representing BSON binary data. -""" - -BINARY_SUBTYPE = 0 -"""BSON binary subtype for binary data. - -This is the default subtype for binary data. -""" - -FUNCTION_SUBTYPE = 1 -"""BSON binary subtype for functions. -""" - -OLD_BINARY_SUBTYPE = 2 -"""Old BSON binary subtype for binary data. - -This is the old default subtype, the current -default is :data:`BINARY_SUBTYPE`. -""" - -OLD_UUID_SUBTYPE = 3 -"""Old BSON binary subtype for a UUID. - -:class:`uuid.UUID` instances will automatically be encoded -by :mod:`bson` using this subtype when using -:data:`UuidRepresentation.PYTHON_LEGACY`, -:data:`UuidRepresentation.JAVA_LEGACY`, or -:data:`UuidRepresentation.CSHARP_LEGACY`. - -.. versionadded:: 2.1 -""" - -UUID_SUBTYPE = 4 -"""BSON binary subtype for a UUID. - -This is the standard BSON binary subtype for UUIDs. -:class:`uuid.UUID` instances will automatically be encoded -by :mod:`bson` using this subtype when using -:data:`UuidRepresentation.STANDARD`. -""" - - -if TYPE_CHECKING: - from array import array as _array - from mmap import mmap as _mmap - - -class UuidRepresentation: - UNSPECIFIED = 0 - """An unspecified UUID representation. - - When configured, :class:`uuid.UUID` instances will **not** be - automatically encoded to or decoded from :class:`~bson.binary.Binary`. - When encoding a :class:`uuid.UUID` instance, an error will be raised. - To encode a :class:`uuid.UUID` instance with this configuration, it must - be wrapped in the :class:`~bson.binary.Binary` class by the application - code. When decoding a BSON binary field with a UUID subtype, a - :class:`~bson.binary.Binary` instance will be returned instead of a - :class:`uuid.UUID` instance. - - See :ref:`unspecified-representation-details` for details. - - .. 
versionadded:: 3.11 - """ - - STANDARD = UUID_SUBTYPE - """The standard UUID representation. - - :class:`uuid.UUID` instances will automatically be encoded to - and decoded from BSON binary, using RFC-4122 byte order with - binary subtype :data:`UUID_SUBTYPE`. - - See :ref:`standard-representation-details` for details. - - .. versionadded:: 3.11 - """ - - PYTHON_LEGACY = OLD_UUID_SUBTYPE - """The Python legacy UUID representation. - - :class:`uuid.UUID` instances will automatically be encoded to - and decoded from BSON binary, using RFC-4122 byte order with - binary subtype :data:`OLD_UUID_SUBTYPE`. - - See :ref:`python-legacy-representation-details` for details. - - .. versionadded:: 3.11 - """ - - JAVA_LEGACY = 5 - """The Java legacy UUID representation. - - :class:`uuid.UUID` instances will automatically be encoded to - and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, - using the Java driver's legacy byte order. - - See :ref:`java-legacy-representation-details` for details. - - .. versionadded:: 3.11 - """ - - CSHARP_LEGACY = 6 - """The C#/.net legacy UUID representation. - - :class:`uuid.UUID` instances will automatically be encoded to - and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, - using the C# driver's legacy byte order. - - See :ref:`csharp-legacy-representation-details` for details. - - .. versionadded:: 3.11 - """ - - -STANDARD = UuidRepresentation.STANDARD -"""An alias for :data:`UuidRepresentation.STANDARD`. - -.. versionadded:: 3.0 -""" - -PYTHON_LEGACY = UuidRepresentation.PYTHON_LEGACY -"""An alias for :data:`UuidRepresentation.PYTHON_LEGACY`. - -.. versionadded:: 3.0 -""" - -JAVA_LEGACY = UuidRepresentation.JAVA_LEGACY -"""An alias for :data:`UuidRepresentation.JAVA_LEGACY`. - -.. versionchanged:: 3.6 - BSON binary subtype 4 is decoded using RFC-4122 byte order. -.. versionadded:: 2.3 -""" - -CSHARP_LEGACY = UuidRepresentation.CSHARP_LEGACY -"""An alias for :data:`UuidRepresentation.CSHARP_LEGACY`. - -.. versionchanged:: 3.6 - BSON binary subtype 4 is decoded using RFC-4122 byte order. -.. versionadded:: 2.3 -""" - -ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE) -ALL_UUID_REPRESENTATIONS = ( - UuidRepresentation.UNSPECIFIED, - UuidRepresentation.STANDARD, - UuidRepresentation.PYTHON_LEGACY, - UuidRepresentation.JAVA_LEGACY, - UuidRepresentation.CSHARP_LEGACY, -) -UUID_REPRESENTATION_NAMES = { - UuidRepresentation.UNSPECIFIED: "UuidRepresentation.UNSPECIFIED", - UuidRepresentation.STANDARD: "UuidRepresentation.STANDARD", - UuidRepresentation.PYTHON_LEGACY: "UuidRepresentation.PYTHON_LEGACY", - UuidRepresentation.JAVA_LEGACY: "UuidRepresentation.JAVA_LEGACY", - UuidRepresentation.CSHARP_LEGACY: "UuidRepresentation.CSHARP_LEGACY", -} - -MD5_SUBTYPE = 5 -"""BSON binary subtype for an MD5 hash. -""" - -COLUMN_SUBTYPE = 7 -"""BSON binary subtype for columns. - -.. versionadded:: 4.0 -""" - -SENSITIVE_SUBTYPE = 8 -"""BSON binary subtype for sensitive data. - -.. versionadded:: 4.5 -""" - - -VECTOR_SUBTYPE = 9 -"""BSON binary subtype for densely packed vector data. - -.. versionadded:: 4.10 -""" - - -USER_DEFINED_SUBTYPE = 128 -"""BSON binary subtype for any user defined structure. -""" - - -class BinaryVectorDtype(Enum): - """Datatypes of vector subtype. 
- - :param FLOAT32: (0x27) Pack list of :class:`float` as float32 - :param INT8: (0x03) Pack list of :class:`int` in [-128, 127] as signed int8 - :param PACKED_BIT: (0x10) Pack list of :class:`int` in [0, 255] as unsigned uint8 - - The `PACKED_BIT` value represents a special case where vector values themselves - can only be of two values (0 or 1) but these are packed together into groups of 8, - a byte. In Python, these are displayed as ints in range [0, 255] - - Each value is of type bytes with a length of one. - - .. versionadded:: 4.10 - """ - - INT8 = b"\x03" - FLOAT32 = b"\x27" - PACKED_BIT = b"\x10" - - -class BinaryVector: - """Vector of numbers along with metadata for binary interoperability. - .. versionadded:: 4.10 - """ - - __slots__ = ("data", "dtype", "padding") - - def __init__(self, data: Sequence[float | int], dtype: BinaryVectorDtype, padding: int = 0): - """ - :param data: Sequence of numbers representing the mathematical vector. - :param dtype: The data type stored in binary - :param padding: The number of bits in the final byte that are to be ignored - when a vector element's size is less than a byte - and the length of the vector is not a multiple of 8. - """ - self.data = data - self.dtype = dtype - self.padding = padding - - def __repr__(self) -> str: - return f"BinaryVector(dtype={self.dtype}, padding={self.padding}, data={self.data})" - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, BinaryVector): - return False - return ( - self.dtype == other.dtype and self.padding == other.padding and self.data == other.data - ) - - -class Binary(bytes): - """Representation of BSON binary data. - - We want to represent Python strings as the BSON string type. - We need to wrap binary data so that we can tell - the difference between what should be considered binary data and - what should be considered a string when we encode to BSON. - - Subtype 9 provides a space-efficient representation of 1-dimensional vector data. - Its data is prepended with two bytes of metadata. - The first (dtype) describes its data type, such as float32 or int8. - The second (padding) prescribes the number of bits to ignore in the final byte. - This is relevant when the element size of the dtype is not a multiple of 8. - - Raises TypeError if `subtype` is not an instance of :class:`int`. - Raises ValueError if `subtype` is not in [0, 256). - - .. note:: - Instances of Binary with subtype 0 will be decoded directly to :class:`bytes`. - - :param data: the binary data to represent. Can be any bytes-like type - that implements the buffer protocol. - :param subtype: the `binary subtype - `_ - to use - - .. versionchanged:: 3.9 - Support any bytes-like type that implements the buffer protocol. - - .. versionchanged:: 4.10 - Addition of vector subtype. - """ - - _type_marker = 5 - __subtype: int - - def __new__( - cls: Type[Binary], - data: Union[memoryview, bytes, _mmap, _array[Any]], - subtype: int = BINARY_SUBTYPE, - ) -> Binary: - if not isinstance(subtype, int): - raise TypeError(f"subtype must be an instance of int, not {type(subtype)}") - if subtype >= 256 or subtype < 0: - raise ValueError("subtype must be contained in [0, 256)") - # Support any type that implements the buffer protocol. - self = bytes.__new__(cls, memoryview(data).tobytes()) - self.__subtype = subtype - return self - - @classmethod - def from_uuid( - cls: Type[Binary], uuid: UUID, uuid_representation: int = UuidRepresentation.STANDARD - ) -> Binary: - """Create a BSON Binary object from a Python UUID. 
- - Creates a :class:`~bson.binary.Binary` object from a - :class:`uuid.UUID` instance. Assumes that the native - :class:`uuid.UUID` instance uses the byte-order implied by the - provided ``uuid_representation``. - - Raises :exc:`TypeError` if `uuid` is not an instance of - :class:`~uuid.UUID`. - - :param uuid: A :class:`uuid.UUID` instance. - :param uuid_representation: A member of - :class:`~bson.binary.UuidRepresentation`. Default: - :const:`~bson.binary.UuidRepresentation.STANDARD`. - See :ref:`handling-uuid-data-example` for details. - - .. versionadded:: 3.11 - """ - if not isinstance(uuid, UUID): - raise TypeError(f"uuid must be an instance of uuid.UUID, not {type(uuid)}") - - if uuid_representation not in ALL_UUID_REPRESENTATIONS: - raise ValueError( - "uuid_representation must be a value from bson.binary.UuidRepresentation" - ) - - if uuid_representation == UuidRepresentation.UNSPECIFIED: - raise ValueError( - "cannot encode native uuid.UUID with " - "UuidRepresentation.UNSPECIFIED. UUIDs can be manually " - "converted to bson.Binary instances using " - "bson.Binary.from_uuid() or a different UuidRepresentation " - "can be configured. See the documentation for " - "UuidRepresentation for more information." - ) - - subtype = OLD_UUID_SUBTYPE - if uuid_representation == UuidRepresentation.PYTHON_LEGACY: - payload = uuid.bytes - elif uuid_representation == UuidRepresentation.JAVA_LEGACY: - from_uuid = uuid.bytes - payload = from_uuid[0:8][::-1] + from_uuid[8:16][::-1] - elif uuid_representation == UuidRepresentation.CSHARP_LEGACY: - payload = uuid.bytes_le - else: - # uuid_representation == UuidRepresentation.STANDARD - subtype = UUID_SUBTYPE - payload = uuid.bytes - - return cls(payload, subtype) - - def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUID: - """Create a Python UUID from this BSON Binary object. - - Decodes this binary object as a native :class:`uuid.UUID` instance - with the provided ``uuid_representation``. - - Raises :exc:`ValueError` if this :class:`~bson.binary.Binary` instance - does not contain a UUID. - - :param uuid_representation: A member of - :class:`~bson.binary.UuidRepresentation`. Default: - :const:`~bson.binary.UuidRepresentation.STANDARD`. - See :ref:`handling-uuid-data-example` for details. - - .. versionadded:: 3.11 - """ - if self.subtype not in ALL_UUID_SUBTYPES: - raise ValueError(f"cannot decode subtype {self.subtype} as a uuid") - - if uuid_representation not in ALL_UUID_REPRESENTATIONS: - raise ValueError( - "uuid_representation must be a value from bson.binary.UuidRepresentation" - ) - - if uuid_representation == UuidRepresentation.UNSPECIFIED: - raise ValueError("uuid_representation cannot be UNSPECIFIED") - elif uuid_representation == UuidRepresentation.PYTHON_LEGACY: - if self.subtype == OLD_UUID_SUBTYPE: - return UUID(bytes=self) - elif uuid_representation == UuidRepresentation.JAVA_LEGACY: - if self.subtype == OLD_UUID_SUBTYPE: - return UUID(bytes=self[0:8][::-1] + self[8:16][::-1]) - elif uuid_representation == UuidRepresentation.CSHARP_LEGACY: - if self.subtype == OLD_UUID_SUBTYPE: - return UUID(bytes_le=self) - else: - # uuid_representation == UuidRepresentation.STANDARD - if self.subtype == UUID_SUBTYPE: - return UUID(bytes=self) - - raise ValueError( - f"cannot decode subtype {self.subtype} to {UUID_REPRESENTATION_NAMES[uuid_representation]}" - ) - - @classmethod - @overload - def from_vector(cls: Type[Binary], vector: BinaryVector) -> Binary: - ... 
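For reference, a minimal round-trip through the UUID helpers documented above — a sketch assuming the `pymongo` distribution (which provides this `bson` package) is installed in the active environment:

```python
# Round-trip a native uuid.UUID through bson.Binary using the standard
# representation (RFC-4122 byte order, binary subtype 4).
import uuid

from bson.binary import UUID_SUBTYPE, Binary, UuidRepresentation

native = uuid.uuid4()
as_binary = Binary.from_uuid(native, UuidRepresentation.STANDARD)
assert as_binary.subtype == UUID_SUBTYPE

decoded = as_binary.as_uuid(UuidRepresentation.STANDARD)
assert decoded == native
```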
- - @classmethod - @overload - def from_vector( - cls: Type[Binary], - vector: Union[list[int], list[float]], - dtype: BinaryVectorDtype, - padding: int = 0, - ) -> Binary: - ... - - @classmethod - def from_vector( - cls: Type[Binary], - vector: Union[BinaryVector, list[int], list[float]], - dtype: Optional[BinaryVectorDtype] = None, - padding: Optional[int] = None, - ) -> Binary: - """Create a BSON :class:`~bson.binary.Binary` of Vector subtype. - - To interpret the representation of the numbers, a data type must be included. - See :class:`~bson.binary.BinaryVectorDtype` for available types and descriptions. - - The dtype and padding are prepended to the binary data's value. - - :param vector: Either a List of values, or a :class:`~bson.binary.BinaryVector` dataclass. - :param dtype: Data type of the values - :param padding: For fractional bytes, number of bits to ignore at end of vector. - :return: Binary packed data identified by dtype and padding. - - .. versionadded:: 4.10 - """ - if isinstance(vector, BinaryVector): - if dtype or padding: - raise ValueError( - "The first argument, vector, has type BinaryVector. " - "dtype or padding cannot be separately defined, but were." - ) - dtype = vector.dtype - padding = vector.padding - vector = vector.data # type: ignore - - padding = 0 if padding is None else padding - if dtype == BinaryVectorDtype.INT8: # pack ints in [-128, 127] as signed int8 - format_str = "b" - if padding: - raise ValueError(f"padding does not apply to {dtype=}") - elif dtype == BinaryVectorDtype.PACKED_BIT: # pack ints in [0, 255] as unsigned uint8 - format_str = "B" - if 0 <= padding > 7: - raise ValueError(f"{padding=}. It must be in [0,1, ..7].") - if padding and not vector: - raise ValueError("Empty vector with non-zero padding.") - elif dtype == BinaryVectorDtype.FLOAT32: # pack floats as float32 - format_str = "f" - if padding: - raise ValueError(f"padding does not apply to {dtype=}") - else: - raise NotImplementedError("%s not yet supported" % dtype) - - metadata = struct.pack(" BinaryVector: - """From the Binary, create a list of numbers, along with dtype and padding. - - :return: BinaryVector - - .. versionadded:: 4.10 - """ - - if self.subtype != VECTOR_SUBTYPE: - raise ValueError(f"Cannot decode subtype {self.subtype} as a vector") - - position = 0 - dtype, padding = struct.unpack_from(" int: - """Subtype of this binary data.""" - return self.__subtype - - def __getnewargs__(self) -> Tuple[bytes, int]: # type: ignore[override] - # Work around http://bugs.python.org/issue7382 - data = super().__getnewargs__()[0] - if not isinstance(data, bytes): - data = data.encode("latin-1") - return data, self.__subtype - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Binary): - return (self.__subtype, bytes(self)) == (other.subtype, bytes(other)) - # We don't return NotImplemented here because if we did then - # Binary("foo") == "foo" would return True, since Binary is a - # subclass of str... 
- return False - - def __hash__(self) -> int: - return super().__hash__() ^ hash(self.__subtype) - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __repr__(self) -> str: - if self.__subtype == SENSITIVE_SUBTYPE: - return f"<Binary(REDACTED, {self.__subtype})>" - else: - return f"Binary({bytes.__repr__(self)}, {self.__subtype})" diff --git a/venv/lib/python3.12/site-packages/bson/bson-endian.h b/venv/lib/python3.12/site-packages/bson/bson-endian.h deleted file mode 100644 index e906b077..00000000 --- a/venv/lib/python3.12/site-packages/bson/bson-endian.h +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright 2013-2016 MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -#ifndef BSON_ENDIAN_H -#define BSON_ENDIAN_H - - -#if defined(__sun) -# include <sys/byteorder.h> -#endif - - -#ifdef _MSC_VER -# define BSON_INLINE __inline -#else -# include <stdint.h> -# define BSON_INLINE __inline__ -#endif - - -#define BSON_BIG_ENDIAN 4321 -#define BSON_LITTLE_ENDIAN 1234 - - -/* WORDS_BIGENDIAN from pyconfig.h / Python.h */ -#ifdef WORDS_BIGENDIAN -# define BSON_BYTE_ORDER BSON_BIG_ENDIAN -#else -# define BSON_BYTE_ORDER BSON_LITTLE_ENDIAN -#endif - - -#if defined(__sun) -# define BSON_UINT16_SWAP_LE_BE(v) BSWAP_16((uint16_t)v) -# define BSON_UINT32_SWAP_LE_BE(v) BSWAP_32((uint32_t)v) -# define BSON_UINT64_SWAP_LE_BE(v) BSWAP_64((uint64_t)v) -#elif defined(__clang__) && defined(__clang_major__) && defined(__clang_minor__) && \ - (__clang_major__ >= 3) && (__clang_minor__ >= 1) -# if __has_builtin(__builtin_bswap16) -# define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16(v) -# endif -# if __has_builtin(__builtin_bswap32) -# define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32(v) -# endif -# if __has_builtin(__builtin_bswap64) -# define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64(v) -# endif -#elif defined(__GNUC__) && (__GNUC__ >= 4) -# if __GNUC__ >= 4 && defined (__GNUC_MINOR__) && __GNUC_MINOR__ >= 3 -# define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32 ((uint32_t)v) -# define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64 ((uint64_t)v) -# endif -# if __GNUC__ >= 4 && defined (__GNUC_MINOR__) && __GNUC_MINOR__ >= 8 -# define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16 ((uint32_t)v) -# endif -#endif - - -#ifndef BSON_UINT16_SWAP_LE_BE -# define BSON_UINT16_SWAP_LE_BE(v) __bson_uint16_swap_slow ((uint16_t)v) -#endif - - -#ifndef BSON_UINT32_SWAP_LE_BE -# define BSON_UINT32_SWAP_LE_BE(v) __bson_uint32_swap_slow ((uint32_t)v) -#endif - - -#ifndef BSON_UINT64_SWAP_LE_BE -# define BSON_UINT64_SWAP_LE_BE(v) __bson_uint64_swap_slow ((uint64_t)v) -#endif - - -#if BSON_BYTE_ORDER == BSON_LITTLE_ENDIAN -# define BSON_UINT16_FROM_LE(v) ((uint16_t)v) -# define BSON_UINT16_TO_LE(v) ((uint16_t)v) -# define BSON_UINT16_FROM_BE(v) BSON_UINT16_SWAP_LE_BE (v) -# define BSON_UINT16_TO_BE(v) BSON_UINT16_SWAP_LE_BE (v) -# define BSON_UINT32_FROM_LE(v) ((uint32_t)v) -# define BSON_UINT32_TO_LE(v) ((uint32_t)v) -# define BSON_UINT32_FROM_BE(v) BSON_UINT32_SWAP_LE_BE (v) -# define BSON_UINT32_TO_BE(v) BSON_UINT32_SWAP_LE_BE (v) -# define 
BSON_UINT64_FROM_LE(v) ((uint64_t)v) -# define BSON_UINT64_TO_LE(v) ((uint64_t)v) -# define BSON_UINT64_FROM_BE(v) BSON_UINT64_SWAP_LE_BE (v) -# define BSON_UINT64_TO_BE(v) BSON_UINT64_SWAP_LE_BE (v) -# define BSON_DOUBLE_FROM_LE(v) ((double)v) -# define BSON_DOUBLE_TO_LE(v) ((double)v) -#elif BSON_BYTE_ORDER == BSON_BIG_ENDIAN -# define BSON_UINT16_FROM_LE(v) BSON_UINT16_SWAP_LE_BE (v) -# define BSON_UINT16_TO_LE(v) BSON_UINT16_SWAP_LE_BE (v) -# define BSON_UINT16_FROM_BE(v) ((uint16_t)v) -# define BSON_UINT16_TO_BE(v) ((uint16_t)v) -# define BSON_UINT32_FROM_LE(v) BSON_UINT32_SWAP_LE_BE (v) -# define BSON_UINT32_TO_LE(v) BSON_UINT32_SWAP_LE_BE (v) -# define BSON_UINT32_FROM_BE(v) ((uint32_t)v) -# define BSON_UINT32_TO_BE(v) ((uint32_t)v) -# define BSON_UINT64_FROM_LE(v) BSON_UINT64_SWAP_LE_BE (v) -# define BSON_UINT64_TO_LE(v) BSON_UINT64_SWAP_LE_BE (v) -# define BSON_UINT64_FROM_BE(v) ((uint64_t)v) -# define BSON_UINT64_TO_BE(v) ((uint64_t)v) -# define BSON_DOUBLE_FROM_LE(v) (__bson_double_swap_slow (v)) -# define BSON_DOUBLE_TO_LE(v) (__bson_double_swap_slow (v)) -#else -# error "The endianness of target architecture is unknown." -#endif - - -/* - *-------------------------------------------------------------------------- - * - * __bson_uint16_swap_slow -- - * - * Fallback endianness conversion for 16-bit integers. - * - * Returns: - * The endian swapped version. - * - * Side effects: - * None. - * - *-------------------------------------------------------------------------- - */ - -static BSON_INLINE uint16_t -__bson_uint16_swap_slow (uint16_t v) /* IN */ -{ - return ((v & 0x00FF) << 8) | - ((v & 0xFF00) >> 8); -} - - -/* - *-------------------------------------------------------------------------- - * - * __bson_uint32_swap_slow -- - * - * Fallback endianness conversion for 32-bit integers. - * - * Returns: - * The endian swapped version. - * - * Side effects: - * None. - * - *-------------------------------------------------------------------------- - */ - -static BSON_INLINE uint32_t -__bson_uint32_swap_slow (uint32_t v) /* IN */ -{ - return ((v & 0x000000FFU) << 24) | - ((v & 0x0000FF00U) << 8) | - ((v & 0x00FF0000U) >> 8) | - ((v & 0xFF000000U) >> 24); -} - - -/* - *-------------------------------------------------------------------------- - * - * __bson_uint64_swap_slow -- - * - * Fallback endianness conversion for 64-bit integers. - * - * Returns: - * The endian swapped version. - * - * Side effects: - * None. - * - *-------------------------------------------------------------------------- - */ - -static BSON_INLINE uint64_t -__bson_uint64_swap_slow (uint64_t v) /* IN */ -{ - return ((v & 0x00000000000000FFULL) << 56) | - ((v & 0x000000000000FF00ULL) << 40) | - ((v & 0x0000000000FF0000ULL) << 24) | - ((v & 0x00000000FF000000ULL) << 8) | - ((v & 0x000000FF00000000ULL) >> 8) | - ((v & 0x0000FF0000000000ULL) >> 24) | - ((v & 0x00FF000000000000ULL) >> 40) | - ((v & 0xFF00000000000000ULL) >> 56); -} - - -/* - *-------------------------------------------------------------------------- - * - * __bson_double_swap_slow -- - * - * Fallback endianness conversion for double floating point. - * - * Returns: - * The endian swapped version. - * - * Side effects: - * None. 
- * - *-------------------------------------------------------------------------- - */ - - -static BSON_INLINE double -__bson_double_swap_slow (double v) /* IN */ -{ - uint64_t uv; - - memcpy(&uv, &v, sizeof(v)); - uv = BSON_UINT64_SWAP_LE_BE(uv); - memcpy(&v, &uv, sizeof(v)); - - return v; -} - - -#endif /* BSON_ENDIAN_H */ diff --git a/venv/lib/python3.12/site-packages/bson/buffer.c b/venv/lib/python3.12/site-packages/bson/buffer.c deleted file mode 100644 index cc752027..00000000 --- a/venv/lib/python3.12/site-packages/bson/buffer.c +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2009-2015 MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/* Include Python.h so we can set Python's error indicator. */ -#define PY_SSIZE_T_CLEAN -#include "Python.h" - -#include <stdlib.h> -#include <string.h> - -#include "buffer.h" - -#define INITIAL_BUFFER_SIZE 256 - -struct buffer { - char* buffer; - int size; - int position; -}; - -/* Set Python's error indicator to MemoryError. - * Called after allocation failures. */ -static void set_memory_error(void) { - PyErr_NoMemory(); -} - -/* Allocate and return a new buffer. - * Return NULL and sets MemoryError on allocation failure. */ -buffer_t pymongo_buffer_new(void) { - buffer_t buffer; - buffer = (buffer_t)malloc(sizeof(struct buffer)); - if (buffer == NULL) { - set_memory_error(); - return NULL; - } - - buffer->size = INITIAL_BUFFER_SIZE; - buffer->position = 0; - buffer->buffer = (char*)malloc(sizeof(char) * INITIAL_BUFFER_SIZE); - if (buffer->buffer == NULL) { - free(buffer); - set_memory_error(); - return NULL; - } - - return buffer; -} - -/* Free the memory allocated for `buffer`. - * Return non-zero on failure. */ -int pymongo_buffer_free(buffer_t buffer) { - if (buffer == NULL) { - return 1; - } - /* Buffer will be NULL when buffer_grow fails. */ - if (buffer->buffer != NULL) { - free(buffer->buffer); - } - free(buffer); - return 0; -} - -/* Grow `buffer` to at least `min_length`. - * Return non-zero and sets MemoryError on allocation failure. */ -static int buffer_grow(buffer_t buffer, int min_length) { - int old_size = 0; - int size = buffer->size; - char* old_buffer = buffer->buffer; - if (size >= min_length) { - return 0; - } - while (size < min_length) { - old_size = size; - size *= 2; - if (size <= old_size) { - /* Size did not increase. Could be an overflow - * or size < 1. Just go with min_length. */ - size = min_length; - } - } - buffer->buffer = (char*)realloc(buffer->buffer, sizeof(char) * size); - if (buffer->buffer == NULL) { - free(old_buffer); - set_memory_error(); - return 1; - } - buffer->size = size; - return 0; - } - -/* Assure that `buffer` has at least `size` free bytes (and grow if needed). - * Return non-zero and sets MemoryError on allocation failure. - * Return non-zero and sets ValueError if `size` would exceed 2GiB. */ -static int buffer_assure_space(buffer_t buffer, int size) { - int new_size = buffer->position + size; - /* Check for overflow. 
*/ - if (new_size < buffer->position) { - PyErr_SetString(PyExc_ValueError, - "Document would overflow BSON size limit"); - return 1; - } - - if (new_size <= buffer->size) { - return 0; - } - return buffer_grow(buffer, new_size); -} - -/* Save `size` bytes from the current position in `buffer` (and grow if needed). - * Return offset for writing, or -1 on failure. - * Sets MemoryError or ValueError on failure. */ -buffer_position pymongo_buffer_save_space(buffer_t buffer, int size) { - int position = buffer->position; - if (buffer_assure_space(buffer, size) != 0) { - return -1; - } - buffer->position += size; - return position; -} - -/* Write `size` bytes from `data` to `buffer` (and grow if needed). - * Return non-zero on failure. - * Sets MemoryError or ValueError on failure. */ -int pymongo_buffer_write(buffer_t buffer, const char* data, int size) { - if (buffer_assure_space(buffer, size) != 0) { - return 1; - } - - memcpy(buffer->buffer + buffer->position, data, size); - buffer->position += size; - return 0; -} - -int pymongo_buffer_get_position(buffer_t buffer) { - return buffer->position; -} - -char* pymongo_buffer_get_buffer(buffer_t buffer) { - return buffer->buffer; -} - -void pymongo_buffer_update_position(buffer_t buffer, buffer_position new_position) { - buffer->position = new_position; -} diff --git a/venv/lib/python3.12/site-packages/bson/buffer.h b/venv/lib/python3.12/site-packages/bson/buffer.h deleted file mode 100644 index a78e34e4..00000000 --- a/venv/lib/python3.12/site-packages/bson/buffer.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2009-2015 MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef BUFFER_H -#define BUFFER_H - -/* Note: if any of these functions return a failure condition then the buffer - * has already been freed. */ - -/* A buffer */ -typedef struct buffer* buffer_t; -/* A position in the buffer */ -typedef int buffer_position; - -/* Allocate and return a new buffer. - * Return NULL on allocation failure. */ -buffer_t pymongo_buffer_new(void); - -/* Free the memory allocated for `buffer`. - * Return non-zero on failure. */ -int pymongo_buffer_free(buffer_t buffer); - -/* Save `size` bytes from the current position in `buffer` (and grow if needed). - * Return offset for writing, or -1 on allocation failure. */ -buffer_position pymongo_buffer_save_space(buffer_t buffer, int size); - -/* Write `size` bytes from `data` to `buffer` (and grow if needed). - * Return non-zero on allocation failure. */ -int pymongo_buffer_write(buffer_t buffer, const char* data, int size); - -/* Getters for the internals of a buffer_t. - * Should try to avoid using these as much as possible - * since they break the abstraction. 
*/ -buffer_position pymongo_buffer_get_position(buffer_t buffer); -char* pymongo_buffer_get_buffer(buffer_t buffer); -void pymongo_buffer_update_position(buffer_t buffer, buffer_position new_position); - -#endif diff --git a/venv/lib/python3.12/site-packages/bson/code.py b/venv/lib/python3.12/site-packages/bson/code.py deleted file mode 100644 index f0523b2a..00000000 --- a/venv/lib/python3.12/site-packages/bson/code.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for representing JavaScript code in BSON.""" -from __future__ import annotations - -from collections.abc import Mapping as _Mapping -from typing import Any, Mapping, Optional, Type, Union - - -class Code(str): - """BSON's JavaScript code type. - - Raises :class:`TypeError` if `code` is not an instance of - :class:`str` or `scope` is not ``None`` or an instance - of :class:`dict`. - - Scope variables can be set by passing a dictionary as the `scope` - argument or by using keyword arguments. If a variable is set as a - keyword argument it will override any setting for that variable in - the `scope` dictionary. - - :param code: A string containing JavaScript code to be evaluated or another - instance of Code. In the latter case, the scope of `code` becomes this - Code's :attr:`scope`. - :param scope: dictionary representing the scope in which - `code` should be evaluated - a mapping from identifiers (as - strings) to values. Defaults to ``None``. This is applied after any - scope associated with a given `code` above. - :param kwargs: scope variables can also be passed as - keyword arguments. These are applied after `scope` and `code`. - - .. versionchanged:: 3.4 - The default value for :attr:`scope` is ``None`` instead of ``{}``. 
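A short sketch of the scope-merging rules the `Code` docstring above spells out (assuming `pymongo` is installed): keyword arguments are applied after the `scope` mapping, so they win on conflicts.

```python
# Scope variables passed as keyword arguments override the `scope` mapping.
from bson.code import Code

js = Code("function () { return x + y; }", scope={"x": 1, "y": 2}, y=10)
assert str(js) == "function () { return x + y; }"
assert js.scope == {"x": 1, "y": 10}
```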
- - """ - - _type_marker = 13 - __scope: Union[Mapping[str, Any], None] - - def __new__( - cls: Type[Code], - code: Union[str, Code], - scope: Optional[Mapping[str, Any]] = None, - **kwargs: Any, - ) -> Code: - if not isinstance(code, str): - raise TypeError(f"code must be an instance of str, not {type(code)}") - - self = str.__new__(cls, code) - - try: - self.__scope = code.scope # type: ignore - except AttributeError: - self.__scope = None - - if scope is not None: - if not isinstance(scope, _Mapping): - raise TypeError(f"scope must be an instance of dict, not {type(scope)}") - if self.__scope is not None: - self.__scope.update(scope) # type: ignore - else: - self.__scope = scope - - if kwargs: - if self.__scope is not None: - self.__scope.update(kwargs) # type: ignore - else: - self.__scope = kwargs - - return self - - @property - def scope(self) -> Optional[Mapping[str, Any]]: - """Scope dictionary for this instance or ``None``.""" - return self.__scope - - def __repr__(self) -> str: - return f"Code({str.__repr__(self)}, {self.__scope!r})" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Code): - return (self.__scope, str(self)) == (other.__scope, str(other)) - return False - - __hash__: Any = None - - def __ne__(self, other: Any) -> bool: - return not self == other diff --git a/venv/lib/python3.12/site-packages/bson/codec_options.py b/venv/lib/python3.12/site-packages/bson/codec_options.py deleted file mode 100644 index 258a777a..00000000 --- a/venv/lib/python3.12/site-packages/bson/codec_options.py +++ /dev/null @@ -1,511 +0,0 @@ -# Copyright 2014-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for specifying BSON codec options.""" -from __future__ import annotations - -import abc -import datetime -import enum -from collections.abc import MutableMapping as _MutableMapping -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Iterable, - Mapping, - NamedTuple, - Optional, - Tuple, - Type, - Union, - cast, -) - -from bson.binary import ( - ALL_UUID_REPRESENTATIONS, - UUID_REPRESENTATION_NAMES, - UuidRepresentation, -) -from bson.typings import _DocumentType - -_RAW_BSON_DOCUMENT_MARKER = 101 - - -def _raw_document_class(document_class: Any) -> bool: - """Determine if a document_class is a RawBSONDocument class.""" - marker = getattr(document_class, "_type_marker", None) - return marker == _RAW_BSON_DOCUMENT_MARKER - - -class TypeEncoder(abc.ABC): - """Base class for defining type codec classes which describe how a - custom type can be transformed to one of the types BSON understands. - - Codec classes must implement the ``python_type`` attribute, and the - ``transform_python`` method to support encoding. - - See :ref:`custom-type-type-codec` documentation for an example. 
- """ - - @abc.abstractproperty - def python_type(self) -> Any: - """The Python type to be converted into something serializable.""" - - @abc.abstractmethod - def transform_python(self, value: Any) -> Any: - """Convert the given Python object into something serializable.""" - - -class TypeDecoder(abc.ABC): - """Base class for defining type codec classes which describe how a - BSON type can be transformed to a custom type. - - Codec classes must implement the ``bson_type`` attribute, and the - ``transform_bson`` method to support decoding. - - See :ref:`custom-type-type-codec` documentation for an example. - """ - - @abc.abstractproperty - def bson_type(self) -> Any: - """The BSON type to be converted into our own type.""" - - @abc.abstractmethod - def transform_bson(self, value: Any) -> Any: - """Convert the given BSON value into our own type.""" - - -class TypeCodec(TypeEncoder, TypeDecoder): - """Base class for defining type codec classes which describe how a - custom type can be transformed to/from one of the types :mod:`bson` - can already encode/decode. - - Codec classes must implement the ``python_type`` attribute, and the - ``transform_python`` method to support encoding, as well as the - ``bson_type`` attribute, and the ``transform_bson`` method to support - decoding. - - See :ref:`custom-type-type-codec` documentation for an example. - """ - - -_Codec = Union[TypeEncoder, TypeDecoder, TypeCodec] -_Fallback = Callable[[Any], Any] - - -class TypeRegistry: - """Encapsulates type codecs used in encoding and / or decoding BSON, as - well as the fallback encoder. Type registries cannot be modified after - instantiation. - - ``TypeRegistry`` can be initialized with an iterable of type codecs, and - a callable for the fallback encoder:: - - >>> from bson.codec_options import TypeRegistry - >>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...], - ... fallback_encoder) - - See :ref:`custom-type-type-registry` documentation for an example. - - :param type_codecs: iterable of type codec instances. If - ``type_codecs`` contains multiple codecs that transform a single - python or BSON type, the transformation specified by the type codec - occurring last prevails. A TypeError will be raised if one or more - type codecs modify the encoding behavior of a built-in :mod:`bson` - type. - :param fallback_encoder: callable that accepts a single, - unencodable python value and transforms it into a type that - :mod:`bson` can encode. See :ref:`fallback-encoder-callable` - documentation for an example. 
- """ - - def __init__( - self, - type_codecs: Optional[Iterable[_Codec]] = None, - fallback_encoder: Optional[_Fallback] = None, - ) -> None: - self.__type_codecs = list(type_codecs or []) - self._fallback_encoder = fallback_encoder - self._encoder_map: dict[Any, Any] = {} - self._decoder_map: dict[Any, Any] = {} - - if self._fallback_encoder is not None: - if not callable(fallback_encoder): - raise TypeError("fallback_encoder %r is not a callable" % (fallback_encoder)) - - for codec in self.__type_codecs: - is_valid_codec = False - if isinstance(codec, TypeEncoder): - self._validate_type_encoder(codec) - is_valid_codec = True - self._encoder_map[codec.python_type] = codec.transform_python - if isinstance(codec, TypeDecoder): - is_valid_codec = True - self._decoder_map[codec.bson_type] = codec.transform_bson - if not is_valid_codec: - raise TypeError( - f"Expected an instance of {TypeEncoder.__name__}, {TypeDecoder.__name__}, or {TypeCodec.__name__}, got {codec!r} instead" - ) - - def _validate_type_encoder(self, codec: _Codec) -> None: - from bson import _BUILT_IN_TYPES - - for pytype in _BUILT_IN_TYPES: - if issubclass(cast(TypeCodec, codec).python_type, pytype): - err_msg = ( - "TypeEncoders cannot change how built-in types are " - f"encoded (encoder {codec} transforms type {pytype})" - ) - raise TypeError(err_msg) - - def __repr__(self) -> str: - return "{}(type_codecs={!r}, fallback_encoder={!r})".format( - self.__class__.__name__, - self.__type_codecs, - self._fallback_encoder, - ) - - def __eq__(self, other: Any) -> Any: - if not isinstance(other, type(self)): - return NotImplemented - return ( - (self._decoder_map == other._decoder_map) - and (self._encoder_map == other._encoder_map) - and (self._fallback_encoder == other._fallback_encoder) - ) - - -class DatetimeConversion(int, enum.Enum): - """Options for decoding BSON datetimes.""" - - DATETIME = 1 - """Decode a BSON UTC datetime as a :class:`datetime.datetime`. - - BSON UTC datetimes that cannot be represented as a - :class:`~datetime.datetime` will raise an :class:`OverflowError` - or a :class:`ValueError`. - - .. versionadded 4.3 - """ - - DATETIME_CLAMP = 2 - """Decode a BSON UTC datetime as a :class:`datetime.datetime`, clamping - to :attr:`~datetime.datetime.min` and :attr:`~datetime.datetime.max`. - - .. versionadded 4.3 - """ - - DATETIME_MS = 3 - """Decode a BSON UTC datetime as a :class:`~bson.datetime_ms.DatetimeMS` - object. - - .. versionadded 4.3 - """ - - DATETIME_AUTO = 4 - """Decode a BSON UTC datetime as a :class:`datetime.datetime` if possible, - and a :class:`~bson.datetime_ms.DatetimeMS` if not. - - .. 
versionadded 4.3 - """ - - -class _BaseCodecOptions(NamedTuple): - document_class: Type[Mapping[str, Any]] - tz_aware: bool - uuid_representation: int - unicode_decode_error_handler: str - tzinfo: Optional[datetime.tzinfo] - type_registry: TypeRegistry - datetime_conversion: Optional[DatetimeConversion] - - -if TYPE_CHECKING: - - class CodecOptions(Tuple[_DocumentType], Generic[_DocumentType]): - document_class: Type[_DocumentType] - tz_aware: bool - uuid_representation: int - unicode_decode_error_handler: Optional[str] - tzinfo: Optional[datetime.tzinfo] - type_registry: TypeRegistry - datetime_conversion: Optional[int] - - def __new__( - cls: Type[CodecOptions[_DocumentType]], - document_class: Optional[Type[_DocumentType]] = ..., - tz_aware: bool = ..., - uuid_representation: Optional[int] = ..., - unicode_decode_error_handler: Optional[str] = ..., - tzinfo: Optional[datetime.tzinfo] = ..., - type_registry: Optional[TypeRegistry] = ..., - datetime_conversion: Optional[int] = ..., - ) -> CodecOptions[_DocumentType]: - ... - - # CodecOptions API - def with_options(self, **kwargs: Any) -> CodecOptions[Any]: - ... - - def _arguments_repr(self) -> str: - ... - - def _options_dict(self) -> dict[Any, Any]: - ... - - # NamedTuple API - @classmethod - def _make(cls, obj: Iterable[Any]) -> CodecOptions[_DocumentType]: - ... - - def _asdict(self) -> dict[str, Any]: - ... - - def _replace(self, **kwargs: Any) -> CodecOptions[_DocumentType]: - ... - - _source: str - _fields: Tuple[str] - -else: - - class CodecOptions(_BaseCodecOptions): - """Encapsulates options used encoding and / or decoding BSON.""" - - def __init__(self, *args, **kwargs): - """Encapsulates options used encoding and / or decoding BSON. - - The `document_class` option is used to define a custom type for use - decoding BSON documents. Access to the underlying raw BSON bytes for - a document is available using the :class:`~bson.raw_bson.RawBSONDocument` - type:: - - >>> from bson.raw_bson import RawBSONDocument - >>> from bson.codec_options import CodecOptions - >>> codec_options = CodecOptions(document_class=RawBSONDocument) - >>> coll = db.get_collection('test', codec_options=codec_options) - >>> doc = coll.find_one() - >>> doc.raw - '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00' - - The document class can be any type that inherits from - :class:`~collections.abc.MutableMapping`:: - - >>> class AttributeDict(dict): - ... # A dict that supports attribute access. - ... def __getattr__(self, key): - ... return self[key] - ... def __setattr__(self, key, value): - ... self[key] = value - ... - >>> codec_options = CodecOptions(document_class=AttributeDict) - >>> coll = db.get_collection('test', codec_options=codec_options) - >>> doc = coll.find_one() - >>> doc._id - ObjectId('5b3016359110ea14e8c58b93') - - See :doc:`/examples/datetimes` for examples using the `tz_aware` and - `tzinfo` options. - - See :doc:`/examples/uuid` for examples using the `uuid_representation` - option. - - :param document_class: BSON documents returned in queries will be decoded - to an instance of this class. Must be a subclass of - :class:`~collections.abc.MutableMapping`. Defaults to :class:`dict`. - :param tz_aware: If ``True``, BSON datetimes will be decoded to timezone - aware instances of :class:`~datetime.datetime`. Otherwise they will be - naive. Defaults to ``False``. - :param uuid_representation: The BSON representation to use when encoding - and decoding instances of :class:`~uuid.UUID`. 
Defaults to - :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`. New - applications should consider setting this to - :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language - compatibility. See :ref:`handling-uuid-data-example` for details. - :param unicode_decode_error_handler: The error handler to apply when - a Unicode-related error occurs during BSON decoding that would - otherwise raise :exc:`UnicodeDecodeError`. Valid options include - 'strict', 'replace', 'backslashreplace', 'surrogateescape', and - 'ignore'. Defaults to 'strict'. - :param tzinfo: A :class:`~datetime.tzinfo` subclass that specifies the - timezone to/from which :class:`~datetime.datetime` objects should be - encoded/decoded. - :param type_registry: Instance of :class:`TypeRegistry` used to customize - encoding and decoding behavior. - :param datetime_conversion: Specifies how UTC datetimes should be decoded - within BSON. Valid options include 'datetime_ms' to return as a - DatetimeMS, 'datetime' to return as a datetime.datetime and - raising a ValueError for out-of-range values, 'datetime_auto' to - return DatetimeMS objects when the underlying datetime is - out-of-range and 'datetime_clamp' to clamp to the minimum and - maximum possible datetimes. Defaults to 'datetime'. - - .. versionchanged:: 4.0 - The default for `uuid_representation` was changed from - :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to - :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. - - .. versionadded:: 3.8 - `type_registry` attribute. - - .. warning:: Care must be taken when changing - `unicode_decode_error_handler` from its default value ('strict'). - The 'replace' and 'ignore' modes should not be used when documents - retrieved from the server will be modified in the client application - and stored back to the server. - """ - super().__init__() - - def __new__( - cls: Type[CodecOptions], - document_class: Optional[Type[Mapping[str, Any]]] = None, - tz_aware: bool = False, - uuid_representation: Optional[int] = UuidRepresentation.UNSPECIFIED, - unicode_decode_error_handler: str = "strict", - tzinfo: Optional[datetime.tzinfo] = None, - type_registry: Optional[TypeRegistry] = None, - datetime_conversion: Optional[DatetimeConversion] = DatetimeConversion.DATETIME, - ) -> CodecOptions: - doc_class = document_class or dict - # issubclass can raise TypeError for generic aliases like SON[str, Any]. - # In that case we can use the base class for the comparison. 
- is_mapping = False - try: - is_mapping = issubclass(doc_class, _MutableMapping) - except TypeError: - if hasattr(doc_class, "__origin__"): - is_mapping = issubclass(doc_class.__origin__, _MutableMapping) - if not (is_mapping or _raw_document_class(doc_class)): - raise TypeError( - "document_class must be dict, bson.son.SON, " - "bson.raw_bson.RawBSONDocument, or a " - "subclass of collections.abc.MutableMapping" - ) - if not isinstance(tz_aware, bool): - raise TypeError(f"tz_aware must be True or False, was: tz_aware={tz_aware}") - if uuid_representation not in ALL_UUID_REPRESENTATIONS: - raise ValueError( - "uuid_representation must be a value from bson.binary.UuidRepresentation" - ) - if not isinstance(unicode_decode_error_handler, str): - raise ValueError( - f"unicode_decode_error_handler must be a string, not {type(unicode_decode_error_handler)}" - ) - if tzinfo is not None: - if not isinstance(tzinfo, datetime.tzinfo): - raise TypeError( - f"tzinfo must be an instance of datetime.tzinfo, not {type(tzinfo)}" - ) - if not tz_aware: - raise ValueError("cannot specify tzinfo without also setting tz_aware=True") - - type_registry = type_registry or TypeRegistry() - - if not isinstance(type_registry, TypeRegistry): - raise TypeError( - f"type_registry must be an instance of TypeRegistry, not {type(type_registry)}" - ) - - return tuple.__new__( - cls, - ( - doc_class, - tz_aware, - uuid_representation, - unicode_decode_error_handler, - tzinfo, - type_registry, - datetime_conversion, - ), - ) - - def _arguments_repr(self) -> str: - """Representation of the arguments used to create this object.""" - document_class_repr = ( - "dict" if self.document_class is dict else repr(self.document_class) - ) - - uuid_rep_repr = UUID_REPRESENTATION_NAMES.get( - self.uuid_representation, self.uuid_representation - ) - - return ( - "document_class={}, tz_aware={!r}, uuid_representation={}, " - "unicode_decode_error_handler={!r}, tzinfo={!r}, " - "type_registry={!r}, datetime_conversion={!s}".format( - document_class_repr, - self.tz_aware, - uuid_rep_repr, - self.unicode_decode_error_handler, - self.tzinfo, - self.type_registry, - self.datetime_conversion, - ) - ) - - def _options_dict(self) -> dict[str, Any]: - """Dictionary of the arguments used to create this object.""" - # TODO: PYTHON-2442 use _asdict() instead - return { - "document_class": self.document_class, - "tz_aware": self.tz_aware, - "uuid_representation": self.uuid_representation, - "unicode_decode_error_handler": self.unicode_decode_error_handler, - "tzinfo": self.tzinfo, - "type_registry": self.type_registry, - "datetime_conversion": self.datetime_conversion, - } - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self._arguments_repr()})" - - def with_options(self, **kwargs: Any) -> CodecOptions: - """Make a copy of this CodecOptions, overriding some options:: - - >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS - >>> DEFAULT_CODEC_OPTIONS.tz_aware - False - >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True) - >>> options.tz_aware - True - - .. 
versionadded:: 3.5 - """ - opts = self._options_dict() - opts.update(kwargs) - return CodecOptions(**opts) - - -DEFAULT_CODEC_OPTIONS: CodecOptions[dict[str, Any]] = CodecOptions() - - -def _parse_codec_options(options: Any) -> CodecOptions[Any]: - """Parse BSON codec options.""" - kwargs = {} - for k in set(options) & { - "document_class", - "tz_aware", - "uuidrepresentation", - "unicode_decode_error_handler", - "tzinfo", - "type_registry", - "datetime_conversion", - }: - if k == "uuidrepresentation": - kwargs["uuid_representation"] = options[k] - else: - kwargs[k] = options[k] - return CodecOptions(**kwargs) diff --git a/venv/lib/python3.12/site-packages/bson/datetime_ms.py b/venv/lib/python3.12/site-packages/bson/datetime_ms.py deleted file mode 100644 index 679524cb..00000000 --- a/venv/lib/python3.12/site-packages/bson/datetime_ms.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright 2022-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you -# may not use this file except in compliance with the License. You -# may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. See the License for the specific language governing -# permissions and limitations under the License. - -"""Tools for representing the BSON datetime type. - -.. versionadded:: 4.3 -""" -from __future__ import annotations - -import calendar -import datetime -from typing import Any, Union, cast - -from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, DatetimeConversion -from bson.errors import InvalidBSON -from bson.tz_util import utc - -EPOCH_AWARE = datetime.datetime.fromtimestamp(0, utc) -EPOCH_NAIVE = EPOCH_AWARE.replace(tzinfo=None) -_DATETIME_ERROR_SUGGESTION = ( - "(Consider Using CodecOptions(datetime_conversion=DATETIME_AUTO)" - " or MongoClient(datetime_conversion='DATETIME_AUTO'))." - " See: https://www.mongodb.com/docs/languages/python/pymongo-driver/current/data-formats/dates-and-times/#handling-out-of-range-datetimes" -) - - -class DatetimeMS: - """Represents a BSON UTC datetime.""" - - __slots__ = ("_value",) - - def __init__(self, value: Union[int, datetime.datetime]): - """Represents a BSON UTC datetime. - - BSON UTC datetimes are defined as an int64 of milliseconds since the - Unix epoch. The principal use of DatetimeMS is to represent - datetimes outside the range of the Python builtin - :class:`~datetime.datetime` class when - encoding/decoding BSON. - - To decode UTC datetimes as a ``DatetimeMS``, `datetime_conversion` in - :class:`~bson.codec_options.CodecOptions` must be set to 'datetime_ms' or - 'datetime_auto'. See :ref:`handling-out-of-range-datetimes` for - details. - - :param value: An instance of :class:`datetime.datetime` to be - represented as milliseconds since the Unix epoch, or int of - milliseconds since the Unix epoch. 
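A sketch of the epoch-milliseconds semantics described above (assuming `pymongo` is installed):

```python
# DatetimeMS wraps int64 milliseconds since the Unix epoch; decoding honors
# the tz_aware flag of the CodecOptions passed to as_datetime().
import datetime

from bson.codec_options import CodecOptions
from bson.datetime_ms import DatetimeMS

epoch = DatetimeMS(0)
assert epoch.as_datetime() == datetime.datetime(1970, 1, 1)  # naive by default

aware = epoch.as_datetime(CodecOptions(tz_aware=True))
assert aware.tzinfo is not None  # decoded as an aware UTC datetime
```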
- """ - if isinstance(value, int): - if not (-(2**63) <= value <= 2**63 - 1): - raise OverflowError("Must be a 64-bit integer of milliseconds") - self._value = value - elif isinstance(value, datetime.datetime): - self._value = _datetime_to_millis(value) - else: - raise TypeError(f"{type(value)} is not a valid type for DatetimeMS") - - def __hash__(self) -> int: - return hash(self._value) - - def __repr__(self) -> str: - return type(self).__name__ + "(" + str(self._value) + ")" - - def __lt__(self, other: Union[DatetimeMS, int]) -> bool: - return self._value < other - - def __le__(self, other: Union[DatetimeMS, int]) -> bool: - return self._value <= other - - def __eq__(self, other: Any) -> bool: - if isinstance(other, DatetimeMS): - return self._value == other._value - return False - - def __ne__(self, other: Any) -> bool: - if isinstance(other, DatetimeMS): - return self._value != other._value - return True - - def __gt__(self, other: Union[DatetimeMS, int]) -> bool: - return self._value > other - - def __ge__(self, other: Union[DatetimeMS, int]) -> bool: - return self._value >= other - - _type_marker = 9 - - def as_datetime( - self, codec_options: CodecOptions[Any] = DEFAULT_CODEC_OPTIONS - ) -> datetime.datetime: - """Create a Python :class:`~datetime.datetime` from this DatetimeMS object. - - :param codec_options: A CodecOptions instance for specifying how the - resulting DatetimeMS object will be formatted using ``tz_aware`` - and ``tz_info``. Defaults to - :const:`~bson.codec_options.DEFAULT_CODEC_OPTIONS`. - """ - return cast(datetime.datetime, _millis_to_datetime(self._value, codec_options)) - - def __int__(self) -> int: - return self._value - - -def _datetime_to_millis(dtm: datetime.datetime) -> int: - """Convert datetime to milliseconds since epoch UTC.""" - if dtm.utcoffset() is not None: - dtm = dtm - dtm.utcoffset() # type: ignore - return int(calendar.timegm(dtm.timetuple()) * 1000 + dtm.microsecond // 1000) - - -_MIN_UTC = datetime.datetime.min.replace(tzinfo=utc) -_MAX_UTC = datetime.datetime.max.replace(tzinfo=utc) -_MIN_UTC_MS = _datetime_to_millis(_MIN_UTC) -_MAX_UTC_MS = _datetime_to_millis(_MAX_UTC) - - -# Inclusive min and max for timezones. 
-def _min_datetime_ms(tz: datetime.tzinfo = utc) -> int: - delta = tz.utcoffset(_MIN_UTC) - if delta is not None: - offset_millis = (delta.days * 86400 + delta.seconds) * 1000 + delta.microseconds // 1000 - else: - offset_millis = 0 - return max(_MIN_UTC_MS, _MIN_UTC_MS - offset_millis) - - -def _max_datetime_ms(tz: datetime.tzinfo = utc) -> int: - delta = tz.utcoffset(_MAX_UTC) - if delta is not None: - offset_millis = (delta.days * 86400 + delta.seconds) * 1000 + delta.microseconds // 1000 - else: - offset_millis = 0 - return min(_MAX_UTC_MS, _MAX_UTC_MS - offset_millis) - - -def _millis_to_datetime( - millis: int, opts: CodecOptions[Any] -) -> Union[datetime.datetime, DatetimeMS]: - """Convert milliseconds since epoch UTC to datetime.""" - if ( - opts.datetime_conversion == DatetimeConversion.DATETIME - or opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP - or opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO - ): - tz = opts.tzinfo or utc - if opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP: - millis = max(_min_datetime_ms(tz), min(millis, _max_datetime_ms(tz))) - elif opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO: - if not (_min_datetime_ms(tz) <= millis <= _max_datetime_ms(tz)): - return DatetimeMS(millis) - - diff = ((millis % 1000) + 1000) % 1000 - seconds = (millis - diff) // 1000 - micros = diff * 1000 - - try: - if opts.tz_aware: - dt = EPOCH_AWARE + datetime.timedelta(seconds=seconds, microseconds=micros) - if opts.tzinfo: - dt = dt.astimezone(tz) - return dt - else: - return EPOCH_NAIVE + datetime.timedelta(seconds=seconds, microseconds=micros) - except ArithmeticError as err: - raise InvalidBSON(f"{err} {_DATETIME_ERROR_SUGGESTION}") from err - - elif opts.datetime_conversion == DatetimeConversion.DATETIME_MS: - return DatetimeMS(millis) - else: - raise ValueError("datetime_conversion must be an element of DatetimeConversion") diff --git a/venv/lib/python3.12/site-packages/bson/dbref.py b/venv/lib/python3.12/site-packages/bson/dbref.py deleted file mode 100644 index 40bdb73c..00000000 --- a/venv/lib/python3.12/site-packages/bson/dbref.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2009-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for manipulating DBRefs (references to MongoDB documents).""" -from __future__ import annotations - -from copy import deepcopy -from typing import Any, Mapping, Optional - -from bson._helpers import _getstate_slots, _setstate_slots -from bson.son import SON - - -class DBRef: - """A reference to a document stored in MongoDB.""" - - __slots__ = "__collection", "__id", "__database", "__kwargs" - __getstate__ = _getstate_slots - __setstate__ = _setstate_slots - # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen. - _type_marker = 100 - - def __init__( - self, - collection: str, - id: Any, - database: Optional[str] = None, - _extra: Optional[Mapping[str, Any]] = None, - **kwargs: Any, - ) -> None: - """Initialize a new :class:`DBRef`. 
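A minimal sketch of `DBRef` construction and its embedded-document form, per the class being removed here (assumes `pymongo` is installed; extra keyword fields become attributes, and `as_doc()` is shown further down in this file's diff):

```python
from bson.dbref import DBRef
from bson.objectid import ObjectId

ref = DBRef("users", ObjectId("5b3016359110ea14e8c58b93"), database="app", note="vip")
assert ref.collection == "users"
assert ref.note == "vip"  # extra kwargs become custom fields
assert ref.as_doc() == {"$ref": "users", "$id": ref.id, "$db": "app", "note": "vip"}
```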
- - Raises :class:`TypeError` if `collection` or `database` is not - an instance of :class:`str`. `database` is optional and allows - references to documents to work across databases. Any additional - keyword arguments will create additional fields in the resultant - embedded document. - - :param collection: name of the collection the document is stored in - :param id: the value of the document's ``"_id"`` field - :param database: name of the database to reference - :param kwargs: additional keyword arguments will - create additional, custom fields - - .. seealso:: The MongoDB documentation on `dbrefs `_. - """ - if not isinstance(collection, str): - raise TypeError(f"collection must be an instance of str, not {type(collection)}") - if database is not None and not isinstance(database, str): - raise TypeError(f"database must be an instance of str, not {type(database)}") - - self.__collection = collection - self.__id = id - self.__database = database - kwargs.update(_extra or {}) - self.__kwargs = kwargs - - @property - def collection(self) -> str: - """Get the name of this DBRef's collection.""" - return self.__collection - - @property - def id(self) -> Any: - """Get this DBRef's _id.""" - return self.__id - - @property - def database(self) -> Optional[str]: - """Get the name of this DBRef's database. - - Returns None if this DBRef doesn't specify a database. - """ - return self.__database - - def __getattr__(self, key: Any) -> Any: - try: - return self.__kwargs[key] - except KeyError: - raise AttributeError(key) from None - - def as_doc(self) -> SON[str, Any]: - """Get the SON document representation of this DBRef. - - Generally not needed by application developers - """ - doc = SON([("$ref", self.collection), ("$id", self.id)]) - if self.database is not None: - doc["$db"] = self.database - doc.update(self.__kwargs) - return doc - - def __repr__(self) -> str: - extra = "".join([f", {k}={v!r}" for k, v in self.__kwargs.items()]) - if self.database is None: - return f"DBRef({self.collection!r}, {self.id!r}{extra})" - return f"DBRef({self.collection!r}, {self.id!r}, {self.database!r}{extra})" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, DBRef): - us = (self.__database, self.__collection, self.__id, self.__kwargs) - them = (other.__database, other.__collection, other.__id, other.__kwargs) - return us == them - return NotImplemented - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __hash__(self) -> int: - """Get a hash value for this :class:`DBRef`.""" - return hash( - (self.__collection, self.__id, self.__database, tuple(sorted(self.__kwargs.items()))) - ) - - def __deepcopy__(self, memo: Any) -> DBRef: - """Support function for `copy.deepcopy()`.""" - return DBRef( - deepcopy(self.__collection, memo), - deepcopy(self.__id, memo), - deepcopy(self.__database, memo), - deepcopy(self.__kwargs, memo), - ) diff --git a/venv/lib/python3.12/site-packages/bson/decimal128.py b/venv/lib/python3.12/site-packages/bson/decimal128.py deleted file mode 100644 index 92c054d8..00000000 --- a/venv/lib/python3.12/site-packages/bson/decimal128.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2016-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Tools for working with the BSON decimal128 type.
-
-.. versionadded:: 3.4
-"""
-from __future__ import annotations
-
-import decimal
-import struct
-from typing import Any, Sequence, Tuple, Type, Union
-
-_PACK_64 = struct.Struct("<Q").pack
-_UNPACK_64 = struct.Struct("<Q").unpack
-
-_EXPONENT_MASK = 3 << 61
-_EXPONENT_BIAS = 6176
-_EXPONENT_MAX = 6144
-_EXPONENT_MIN = -6143
-_MAX_DIGITS = 34
-
-_INF = 0x7800000000000000
-_NAN = 0x7C00000000000000
-_SNAN = 0x7E00000000000000
-_SIGN = 0x8000000000000000
-
-_NINF = (_INF + _SIGN, 0)
-_PINF = (_INF, 0)
-_NNAN = (_NAN + _SIGN, 0)
-_PNAN = (_NAN, 0)
-_NSNAN = (_SNAN + _SIGN, 0)
-_PSNAN = (_SNAN, 0)
-
-_CTX_OPTIONS = {
-    "prec": _MAX_DIGITS,
-    "rounding": decimal.ROUND_HALF_EVEN,
-    "Emin": _EXPONENT_MIN,
-    "Emax": _EXPONENT_MAX,
-    "capitals": 1,
-    "flags": [],
-    "traps": [decimal.InvalidOperation, decimal.Overflow, decimal.Inexact],
-    "clamp": 1,
-}
-
-_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy())  # type: ignore
-_VALUE_OPTIONS = Union[decimal.Decimal, float, str, Tuple[int, int], Sequence[int]]
-
-
-def create_decimal128_context() -> decimal.Context:
-    """Returns an instance of :class:`decimal.Context` appropriate
-    for working with IEEE-754 128-bit decimal floating point values.
-    """
-    opts = _CTX_OPTIONS.copy()
-    opts["traps"] = []
-    return decimal.Context(**opts)  # type: ignore
-
-
-def _decimal_to_128(value: _VALUE_OPTIONS) -> Tuple[int, int]:
-    """Converts a decimal.Decimal to BID (high bits, low bits).
-
-    :param value: An instance of decimal.Decimal
-    """
-    with decimal.localcontext(_DEC128_CTX) as ctx:
-        value = ctx.create_decimal(value)
-
-    if value.is_infinite():
-        return _NINF if value.is_signed() else _PINF
-
-    sign, digits, exponent = value.as_tuple()
-
-    if value.is_nan():
-        if digits:
-            raise ValueError("NaN with debug payload is not supported")
-        if value.is_snan():
-            return _NSNAN if value.is_signed() else _PSNAN
-        return _NNAN if value.is_signed() else _PNAN
-
-    significand = int("".join([str(digit) for digit in digits]))
-    bit_length = significand.bit_length()
-
-    high = 0
-    low = 0
-    for i in range(min(64, bit_length)):
-        if significand & (1 << i):
-            low |= 1 << i
-
-    for i in range(64, bit_length):
-        if significand & (1 << i):
-            high |= 1 << (i - 64)
-
-    biased_exponent = exponent + _EXPONENT_BIAS  # type: ignore[operator]
-
-    if high >> 49 == 1:
-        high = high & 0x7FFFFFFFFFFF
-        high |= _EXPONENT_MASK
-        high |= (biased_exponent & 0x3FFF) << 47
-    else:
-        high |= biased_exponent << 49
-
-    if sign:
-        high |= _SIGN
-
-    return high, low
-
-
-class Decimal128:
-    """BSON Decimal128 type::
-
-        >>> Decimal128(Decimal("0.0005"))
-        Decimal128('0.0005')
-        >>> Decimal128("0.0005")
-        Decimal128('0.0005')
-        >>> Decimal128((3474527112516337664, 5))
-        Decimal128('0.0005')
-
-    :param value: An instance of :class:`decimal.Decimal`, string, or tuple of
-        (high bits, low bits) from Binary Integer Decimal (BID) format.
-
-    .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context`
-        configured for IEEE-754 Decimal128 when validating parameters.
-        Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`,
-        and :class:`decimal.Overflow` are trapped and raised as exceptions::
-
-            >>> Decimal128(".13.1")
-            Traceback (most recent call last):
-              File "<stdin>", line 1, in <module>
-            ...
-            decimal.InvalidOperation: [<class 'decimal.InvalidOperation'>]
-            >>>
-            >>> Decimal128("1E-6177")
-            Traceback (most recent call last):
-              File "<stdin>", line 1, in <module>
-            ...
-            decimal.Inexact: [<class 'decimal.Inexact'>]
-            >>>
-            >>> Decimal128("1E6145")
-            Traceback (most recent call last):
-              File "<stdin>", line 1, in <module>
-            ...
-            decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>]
-
-        To ensure the result of a calculation can always be stored as BSON
-        Decimal128 use the context returned by
-        :func:`create_decimal128_context`::
-
-            >>> import decimal
-            >>> decimal128_ctx = create_decimal128_context()
-            >>> with decimal.localcontext(decimal128_ctx) as ctx:
-            ...     Decimal128(ctx.create_decimal(".13.3"))
-            ...
-            Decimal128('NaN')
-            >>>
-            >>> with decimal.localcontext(decimal128_ctx) as ctx:
-            ...
Decimal128(ctx.create_decimal("1E-6177")) - ... - Decimal128('0E-6176') - >>> - >>> with decimal.localcontext(DECIMAL128_CTX) as ctx: - ... Decimal128(ctx.create_decimal("1E6145")) - ... - Decimal128('Infinity') - - To match the behavior of MongoDB's Decimal128 implementation - str(Decimal(value)) may not match str(Decimal128(value)) for NaN values:: - - >>> Decimal128(Decimal('NaN')) - Decimal128('NaN') - >>> Decimal128(Decimal('-NaN')) - Decimal128('NaN') - >>> Decimal128(Decimal('sNaN')) - Decimal128('NaN') - >>> Decimal128(Decimal('-sNaN')) - Decimal128('NaN') - - However, :meth:`~Decimal128.to_decimal` will return the exact value:: - - >>> Decimal128(Decimal('NaN')).to_decimal() - Decimal('NaN') - >>> Decimal128(Decimal('-NaN')).to_decimal() - Decimal('-NaN') - >>> Decimal128(Decimal('sNaN')).to_decimal() - Decimal('sNaN') - >>> Decimal128(Decimal('-sNaN')).to_decimal() - Decimal('-sNaN') - - Two instances of :class:`Decimal128` compare equal if their Binary - Integer Decimal encodings are equal:: - - >>> Decimal128('NaN') == Decimal128('NaN') - True - >>> Decimal128('NaN').bid == Decimal128('NaN').bid - True - - This differs from :class:`decimal.Decimal` comparisons for NaN:: - - >>> Decimal('NaN') == Decimal('NaN') - False - """ - - __slots__ = ("__high", "__low") - - _type_marker = 19 - - def __init__(self, value: _VALUE_OPTIONS) -> None: - if isinstance(value, (str, decimal.Decimal)): - self.__high, self.__low = _decimal_to_128(value) - elif isinstance(value, (list, tuple)): - if len(value) != 2: - raise ValueError( - "Invalid size for creation of Decimal128 " - "from list or tuple. Must have exactly 2 " - "elements." - ) - self.__high, self.__low = value - else: - raise TypeError(f"Cannot convert {value!r} to Decimal128") - - def to_decimal(self) -> decimal.Decimal: - """Returns an instance of :class:`decimal.Decimal` for this - :class:`Decimal128`. - """ - high = self.__high - low = self.__low - sign = 1 if (high & _SIGN) else 0 - - if (high & _SNAN) == _SNAN: - return decimal.Decimal((sign, (), "N")) # type: ignore - elif (high & _NAN) == _NAN: - return decimal.Decimal((sign, (), "n")) # type: ignore - elif (high & _INF) == _INF: - return decimal.Decimal((sign, (), "F")) # type: ignore - - if (high & _EXPONENT_MASK) == _EXPONENT_MASK: - exponent = ((high & 0x1FFFE00000000000) >> 47) - _EXPONENT_BIAS - return decimal.Decimal((sign, (0,), exponent)) - else: - exponent = ((high & 0x7FFF800000000000) >> 49) - _EXPONENT_BIAS - - arr = bytearray(15) - mask = 0x00000000000000FF - for i in range(14, 6, -1): - arr[i] = (low & mask) >> ((14 - i) << 3) - mask = mask << 8 - - mask = 0x00000000000000FF - for i in range(6, 0, -1): - arr[i] = (high & mask) >> ((6 - i) << 3) - mask = mask << 8 - - mask = 0x0001000000000000 - arr[0] = (high & mask) >> 48 - - # cdecimal only accepts a tuple for digits. - digits = tuple(int(digit) for digit in str(int.from_bytes(arr, "big"))) - - with decimal.localcontext(_DEC128_CTX) as ctx: - return ctx.create_decimal((sign, digits, exponent)) - - @classmethod - def from_bid(cls: Type[Decimal128], value: bytes) -> Decimal128: - """Create an instance of :class:`Decimal128` from Binary Integer - Decimal string. - - :param value: 16 byte string (128-bit IEEE 754-2008 decimal floating - point in Binary Integer Decimal (BID) format). 
- """ - if not isinstance(value, bytes): - raise TypeError(f"value must be an instance of bytes, not {type(value)}") - if len(value) != 16: - raise ValueError("value must be exactly 16 bytes") - return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0])) # type: ignore - - @property - def bid(self) -> bytes: - """The Binary Integer Decimal (BID) encoding of this instance.""" - return _PACK_64(self.__low) + _PACK_64(self.__high) - - def __str__(self) -> str: - dec = self.to_decimal() - if dec.is_nan(): - # Required by the drivers spec to match MongoDB behavior. - return "NaN" - return str(dec) - - def __repr__(self) -> str: - return f"Decimal128('{self!s}')" - - def __setstate__(self, value: Tuple[int, int]) -> None: - self.__high, self.__low = value - - def __getstate__(self) -> Tuple[int, int]: - return self.__high, self.__low - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Decimal128): - return self.bid == other.bid - return NotImplemented - - def __ne__(self, other: Any) -> bool: - return not self == other diff --git a/venv/lib/python3.12/site-packages/bson/errors.py b/venv/lib/python3.12/site-packages/bson/errors.py deleted file mode 100644 index a3699e70..00000000 --- a/venv/lib/python3.12/site-packages/bson/errors.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions raised by the BSON package.""" -from __future__ import annotations - - -class BSONError(Exception): - """Base class for all BSON exceptions.""" - - -class InvalidBSON(BSONError): - """Raised when trying to create a BSON object from invalid data.""" - - -class InvalidStringData(BSONError): - """Raised when trying to encode a string containing non-UTF8 data.""" - - -class InvalidDocument(BSONError): - """Raised when trying to create a BSON object from an invalid document.""" - - -class InvalidId(BSONError): - """Raised when trying to create an ObjectId from invalid data.""" diff --git a/venv/lib/python3.12/site-packages/bson/int64.py b/venv/lib/python3.12/site-packages/bson/int64.py deleted file mode 100644 index 5846504a..00000000 --- a/venv/lib/python3.12/site-packages/bson/int64.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2014-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""A BSON wrapper for long (int in python3)""" -from __future__ import annotations - -from typing import Any - - -class Int64(int): - """Representation of the BSON int64 type. 
- - This is necessary because every integral number is an :class:`int` in - Python 3. Small integral numbers are encoded to BSON int32 by default, - but Int64 numbers will always be encoded to BSON int64. - - :param value: the numeric value to represent - """ - - __slots__ = () - - _type_marker = 18 - - def __getstate__(self) -> Any: - return {} - - def __setstate__(self, state: Any) -> None: - pass diff --git a/venv/lib/python3.12/site-packages/bson/json_util.py b/venv/lib/python3.12/site-packages/bson/json_util.py deleted file mode 100644 index ecae103b..00000000 --- a/venv/lib/python3.12/site-packages/bson/json_util.py +++ /dev/null @@ -1,1164 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for using Python's :mod:`json` module with BSON documents. - -This module provides two helper methods `dumps` and `loads` that wrap the -native :mod:`json` methods and provide explicit BSON conversion to and from -JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON -is emitted and parsed, with the default being the Relaxed Extended JSON format. -:mod:`~bson.json_util` can also generate Canonical or legacy `Extended JSON`_ -when :const:`CANONICAL_JSON_OPTIONS` or :const:`LEGACY_JSON_OPTIONS` is -provided, respectively. - -.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json/extended-json.md - -Example usage (deserialization): - -.. doctest:: - - >>> from bson.json_util import loads - >>> loads( - ... '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]' - ... ) - [{'foo': [1, 2]}, {'bar': {'hello': 'world'}}, {'code': Code('function x() { return 1; }', {})}, {'bin': Binary(b'...', 128)}] - -Example usage with :const:`RELAXED_JSON_OPTIONS` (the default): - -.. doctest:: - - >>> from bson import Binary, Code - >>> from bson.json_util import dumps - >>> dumps( - ... [ - ... {"foo": [1, 2]}, - ... {"bar": {"hello": "world"}}, - ... {"code": Code("function x() { return 1; }")}, - ... {"bin": Binary(b"\x01\x02\x03\x04")}, - ... ] - ... ) - '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]' - -Example usage (with :const:`CANONICAL_JSON_OPTIONS`): - -.. doctest:: - - >>> from bson import Binary, Code - >>> from bson.json_util import dumps, CANONICAL_JSON_OPTIONS - >>> dumps( - ... [ - ... {"foo": [1, 2]}, - ... {"bar": {"hello": "world"}}, - ... {"code": Code("function x() { return 1; }")}, - ... {"bin": Binary(b"\x01\x02\x03\x04")}, - ... ], - ... json_options=CANONICAL_JSON_OPTIONS, - ... ) - '[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]' - -Example usage (with :const:`LEGACY_JSON_OPTIONS`): - -.. 
doctest::
-
-    >>> from bson import Binary, Code
-    >>> from bson.json_util import dumps, LEGACY_JSON_OPTIONS
-    >>> dumps(
-    ...     [
-    ...         {"foo": [1, 2]},
-    ...         {"bar": {"hello": "world"}},
-    ...         {"code": Code("function x() { return 1; }", {})},
-    ...         {"bin": Binary(b"\x01\x02\x03\x04")},
-    ...     ],
-    ...     json_options=LEGACY_JSON_OPTIONS,
-    ... )
-    '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]'
-
-Alternatively, you can manually pass the `default` to :func:`json.dumps`.
-It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code`
-instances (as they are extended strings you can't provide custom defaults),
-but it will be faster as there is less recursion.
-
-.. note::
-   If your application does not need the flexibility offered by
-   :class:`JSONOptions` and spends a large amount of time in the `json_util`
-   module, look to
-   `python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice
-   performance improvement. `python-bsonjs` is a fast BSON to MongoDB
-   Extended JSON converter for Python built on top of
-   `libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best
-   with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`.
-"""
-from __future__ import annotations
-
-import base64
-import datetime
-import json
-import math
-import re
-import uuid
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Mapping,
-    MutableMapping,
-    Optional,
-    Sequence,
-    Tuple,
-    Type,
-    Union,
-    cast,
-)
-
-from bson.binary import ALL_UUID_SUBTYPES, UUID_SUBTYPE, Binary, UuidRepresentation
-from bson.code import Code
-from bson.codec_options import CodecOptions, DatetimeConversion
-from bson.datetime_ms import (
-    _MAX_UTC_MS,
-    EPOCH_AWARE,
-    DatetimeMS,
-    _datetime_to_millis,
-    _millis_to_datetime,
-)
-from bson.dbref import DBRef
-from bson.decimal128 import Decimal128
-from bson.int64 import Int64
-from bson.max_key import MaxKey
-from bson.min_key import MinKey
-from bson.objectid import ObjectId
-from bson.regex import Regex
-from bson.son import RE_TYPE
-from bson.timestamp import Timestamp
-from bson.tz_util import utc
-
-_RE_OPT_TABLE = {
-    "i": re.I,
-    "l": re.L,
-    "m": re.M,
-    "s": re.S,
-    "u": re.U,
-    "x": re.X,
-}
-
-
-class DatetimeRepresentation:
-    LEGACY = 0
-    """Legacy MongoDB Extended JSON datetime representation.
-
-    :class:`datetime.datetime` instances will be encoded to JSON in the
-    format `{"$date": <dateAsMilliseconds>}`, where `dateAsMilliseconds` is
-    a 64-bit signed integer giving the number of milliseconds since the Unix
-    epoch UTC. This was the default encoding before PyMongo version 3.4.
-
-    .. versionadded:: 3.4
-    """
-
-    NUMBERLONG = 1
-    """NumberLong datetime representation.
-
-    :class:`datetime.datetime` instances will be encoded to JSON in the
-    format `{"$date": {"$numberLong": "<dateAsMilliseconds>"}}`,
-    where `dateAsMilliseconds` is the string representation of a 64-bit signed
-    integer giving the number of milliseconds since the Unix epoch UTC.
-
-    .. versionadded:: 3.4
-    """
-
-    ISO8601 = 2
-    """ISO-8601 datetime representation.
-
-    :class:`datetime.datetime` instances greater than or equal to the Unix
-    epoch UTC will be encoded to JSON in the format `{"$date": "<ISO-8601 Date/Time Format>"}`.
-    :class:`datetime.datetime` instances before the Unix epoch UTC will be
-    encoded as if the datetime representation is
-    :const:`~DatetimeRepresentation.NUMBERLONG`.
-
-    .. versionadded:: 3.4
-    """
-
-
-class JSONMode:
-    LEGACY = 0
-    """Legacy Extended JSON representation.
-
-    In this mode, :func:`~bson.json_util.dumps` produces PyMongo's legacy
-    non-standard JSON output. Consider using
-    :const:`~bson.json_util.JSONMode.RELAXED` or
-    :const:`~bson.json_util.JSONMode.CANONICAL` instead.
-
-    .. versionadded:: 3.5
-    """
-
-    RELAXED = 1
-    """Relaxed Extended JSON representation.
-
-    In this mode, :func:`~bson.json_util.dumps` produces Relaxed Extended JSON,
-    a mostly JSON-like format. Consider using this for things like a web API,
-    where one is sending a document (or a projection of a document) that only
-    uses ordinary JSON type primitives. In particular, the ``int``,
-    :class:`~bson.int64.Int64`, and ``float`` numeric types are represented in
-    the native JSON number format. This output is also the most human readable
-    and is useful for debugging and documentation.
-
-    .. seealso:: The specification for Relaxed `Extended JSON`_.
-
-    .. versionadded:: 3.5
-    """
-
-    CANONICAL = 2
-    """Canonical Extended JSON representation.
-
-    In this mode, :func:`~bson.json_util.dumps` produces Canonical Extended
-    JSON, a type preserving format. Consider using this for things like
-    testing, where one has to precisely specify expected types in JSON. In
-    particular, the ``int``, :class:`~bson.int64.Int64`, and ``float`` numeric
-    types are encoded with type wrappers.
-
-    .. seealso:: The specification for Canonical `Extended JSON`_.
-
-    .. versionadded:: 3.5
-    """
-
-
-if TYPE_CHECKING:
-    _BASE_CLASS = CodecOptions[MutableMapping[str, Any]]
-else:
-    _BASE_CLASS = CodecOptions
-
-_INT32_MAX = 2**31
-
-
-class JSONOptions(_BASE_CLASS):
-    json_mode: int
-    strict_number_long: bool
-    datetime_representation: int
-    strict_uuid: bool
-    document_class: Type[MutableMapping[str, Any]]
-
-    def __init__(self, *args: Any, **kwargs: Any):
-        """Encapsulates JSON options for :func:`dumps` and :func:`loads`.
-
-        :param strict_number_long: If ``True``, :class:`~bson.int64.Int64` objects
-            are encoded to MongoDB Extended JSON's *Strict mode* type
-            `NumberLong`, ie ``'{"$numberLong": "<number>" }'``. Otherwise they
-            will be encoded as an `int`. Defaults to ``False``.
-        :param datetime_representation: The representation to use when encoding
-            instances of :class:`datetime.datetime`. Defaults to
-            :const:`~DatetimeRepresentation.LEGACY`.
-        :param strict_uuid: If ``True``, :class:`uuid.UUID` object are encoded to
-            MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it
-            will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``.
-        :param json_mode: The :class:`JSONMode` to use when encoding BSON types to
-            Extended JSON. Defaults to :const:`~JSONMode.LEGACY`.
-        :param document_class: BSON documents returned by :func:`loads` will be
-            decoded to an instance of this class. Must be a subclass of
-            :class:`collections.MutableMapping`. Defaults to :class:`dict`.
-        :param uuid_representation: The :class:`~bson.binary.UuidRepresentation`
-            to use when encoding and decoding instances of :class:`uuid.UUID`.
-            Defaults to :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`.
-        :param tz_aware: If ``True``, MongoDB Extended JSON's *Strict mode* type
-            `Date` will be decoded to timezone aware instances of
-            :class:`datetime.datetime`. Otherwise they will be naive. Defaults
-            to ``False``.
-        :param tzinfo: A :class:`datetime.tzinfo` subclass that specifies the
-            timezone from which :class:`~datetime.datetime` objects should be
-            decoded. Defaults to :const:`~bson.tz_util.utc`.
-        :param datetime_conversion: Specifies how UTC datetimes should be decoded
-            within BSON.
Valid options include 'datetime_ms' to return as a - DatetimeMS, 'datetime' to return as a datetime.datetime and - raising a ValueError for out-of-range values, 'datetime_auto' to - return DatetimeMS objects when the underlying datetime is - out-of-range and 'datetime_clamp' to clamp to the minimum and - maximum possible datetimes. Defaults to 'datetime'. See - :ref:`handling-out-of-range-datetimes` for details. - :param args: arguments to :class:`~bson.codec_options.CodecOptions` - :param kwargs: arguments to :class:`~bson.codec_options.CodecOptions` - - .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_. - - .. versionchanged:: 4.0 - The default for `json_mode` was changed from :const:`JSONMode.LEGACY` - to :const:`JSONMode.RELAXED`. - The default for `uuid_representation` was changed from - :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to - :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. - - .. versionchanged:: 3.5 - Accepts the optional parameter `json_mode`. - - .. versionchanged:: 4.0 - Changed default value of `tz_aware` to False. - """ - super().__init__() - - def __new__( - cls: Type[JSONOptions], - strict_number_long: Optional[bool] = None, - datetime_representation: Optional[int] = None, - strict_uuid: Optional[bool] = None, - json_mode: int = JSONMode.RELAXED, - *args: Any, - **kwargs: Any, - ) -> JSONOptions: - kwargs["tz_aware"] = kwargs.get("tz_aware", False) - if kwargs["tz_aware"]: - kwargs["tzinfo"] = kwargs.get("tzinfo", utc) - if datetime_representation not in ( - DatetimeRepresentation.LEGACY, - DatetimeRepresentation.NUMBERLONG, - DatetimeRepresentation.ISO8601, - None, - ): - raise ValueError( - "JSONOptions.datetime_representation must be one of LEGACY, " - "NUMBERLONG, or ISO8601 from DatetimeRepresentation." - ) - self = cast(JSONOptions, super().__new__(cls, *args, **kwargs)) - if json_mode not in (JSONMode.LEGACY, JSONMode.RELAXED, JSONMode.CANONICAL): - raise ValueError( - "JSONOptions.json_mode must be one of LEGACY, RELAXED, " - "or CANONICAL from JSONMode." - ) - self.json_mode = json_mode - if self.json_mode == JSONMode.RELAXED: - if strict_number_long: - raise ValueError("Cannot specify strict_number_long=True with JSONMode.RELAXED") - if datetime_representation not in (None, DatetimeRepresentation.ISO8601): - raise ValueError( - "datetime_representation must be DatetimeRepresentation." - "ISO8601 or omitted with JSONMode.RELAXED" - ) - if strict_uuid not in (None, True): - raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED") - self.strict_number_long = False - self.datetime_representation = DatetimeRepresentation.ISO8601 - self.strict_uuid = True - elif self.json_mode == JSONMode.CANONICAL: - if strict_number_long not in (None, True): - raise ValueError("Cannot specify strict_number_long=False with JSONMode.RELAXED") - if datetime_representation not in (None, DatetimeRepresentation.NUMBERLONG): - raise ValueError( - "datetime_representation must be DatetimeRepresentation." 
- "NUMBERLONG or omitted with JSONMode.RELAXED" - ) - if strict_uuid not in (None, True): - raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED") - self.strict_number_long = True - self.datetime_representation = DatetimeRepresentation.NUMBERLONG - self.strict_uuid = True - else: # JSONMode.LEGACY - self.strict_number_long = False - self.datetime_representation = DatetimeRepresentation.LEGACY - self.strict_uuid = False - if strict_number_long is not None: - self.strict_number_long = strict_number_long - if datetime_representation is not None: - self.datetime_representation = datetime_representation - if strict_uuid is not None: - self.strict_uuid = strict_uuid - return self - - def _arguments_repr(self) -> str: - return ( - "strict_number_long={!r}, " - "datetime_representation={!r}, " - "strict_uuid={!r}, json_mode={!r}, {}".format( - self.strict_number_long, - self.datetime_representation, - self.strict_uuid, - self.json_mode, - super()._arguments_repr(), - ) - ) - - def _options_dict(self) -> dict[Any, Any]: - # TODO: PYTHON-2442 use _asdict() instead - options_dict = super()._options_dict() - options_dict.update( - { - "strict_number_long": self.strict_number_long, - "datetime_representation": self.datetime_representation, - "strict_uuid": self.strict_uuid, - "json_mode": self.json_mode, - } - ) - return options_dict - - def with_options(self, **kwargs: Any) -> JSONOptions: - """ - Make a copy of this JSONOptions, overriding some options:: - - >>> from bson.json_util import CANONICAL_JSON_OPTIONS - >>> CANONICAL_JSON_OPTIONS.tz_aware - True - >>> json_options = CANONICAL_JSON_OPTIONS.with_options(tz_aware=False, tzinfo=None) - >>> json_options.tz_aware - False - - .. versionadded:: 3.12 - """ - opts = self._options_dict() - for opt in ("strict_number_long", "datetime_representation", "strict_uuid", "json_mode"): - opts[opt] = kwargs.get(opt, getattr(self, opt)) - opts.update(kwargs) - return JSONOptions(**opts) - - -LEGACY_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.LEGACY) -""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format. - -.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`. - -.. versionadded:: 3.5 -""" - -CANONICAL_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.CANONICAL) -""":class:`JSONOptions` for Canonical Extended JSON. - -.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`. - -.. versionadded:: 3.5 -""" - -RELAXED_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.RELAXED) -""":class:`JSONOptions` for Relaxed Extended JSON. - -.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`. - -.. versionadded:: 3.5 -""" - -DEFAULT_JSON_OPTIONS: JSONOptions = RELAXED_JSON_OPTIONS -"""The default :class:`JSONOptions` for JSON encoding/decoding. - -The same as :const:`RELAXED_JSON_OPTIONS`. - -.. versionchanged:: 4.0 - Changed from :const:`LEGACY_JSON_OPTIONS` to - :const:`RELAXED_JSON_OPTIONS`. - -.. versionadded:: 3.4 -""" - - -def dumps(obj: Any, *args: Any, **kwargs: Any) -> str: - """Helper function that wraps :func:`json.dumps`. - - Recursive function that handles all BSON types including - :class:`~bson.binary.Binary` and :class:`~bson.code.Code`. - - :param json_options: A :class:`JSONOptions` instance used to modify the - encoding of MongoDB Extended JSON types. Defaults to - :const:`DEFAULT_JSON_OPTIONS`. - - .. versionchanged:: 4.0 - Now outputs MongoDB Relaxed Extended JSON by default (using - :const:`DEFAULT_JSON_OPTIONS`). - - .. 
versionchanged:: 3.4 - Accepts optional parameter `json_options`. See :class:`JSONOptions`. - """ - json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS) - return json.dumps(_json_convert(obj, json_options), *args, **kwargs) - - -def loads(s: Union[str, bytes, bytearray], *args: Any, **kwargs: Any) -> Any: - """Helper function that wraps :func:`json.loads`. - - Automatically passes the object_hook for BSON type conversion. - - Raises ``TypeError``, ``ValueError``, ``KeyError``, or - :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON. - - :param json_options: A :class:`JSONOptions` instance used to modify the - decoding of MongoDB Extended JSON types. Defaults to - :const:`DEFAULT_JSON_OPTIONS`. - - .. versionchanged:: 4.0 - Now loads :class:`datetime.datetime` instances as naive by default. To - load timezone aware instances utilize the `json_options` parameter. - See :ref:`tz_aware_default_change` for an example. - - .. versionchanged:: 3.5 - Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy - format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON - type wrappers with values of the wrong type or any extra keys. - - .. versionchanged:: 3.4 - Accepts optional parameter `json_options`. See :class:`JSONOptions`. - """ - json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS) - # Execution time optimization if json_options.document_class is dict - if json_options.document_class is dict: - kwargs["object_hook"] = lambda obj: object_hook(obj, json_options) - else: - kwargs["object_pairs_hook"] = lambda pairs: object_pairs_hook(pairs, json_options) - return json.loads(s, *args, **kwargs) - - -def _json_convert(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any: - """Recursive helper method that converts BSON types so they can be - converted into json. - """ - if hasattr(obj, "items"): - return {k: _json_convert(v, json_options) for k, v in obj.items()} - elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)): - return [_json_convert(v, json_options) for v in obj] - try: - return default(obj, json_options) - except TypeError: - return obj - - -def object_pairs_hook( - pairs: Sequence[Tuple[str, Any]], json_options: JSONOptions = DEFAULT_JSON_OPTIONS -) -> Any: - return object_hook(json_options.document_class(pairs), json_options) # type:ignore[call-arg] - - -def object_hook(dct: Mapping[str, Any], json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any: - match = None - for k in dct: - if k in _PARSERS_SET: - match = k - break - if match: - return _PARSERS[match](dct, json_options) - return dct - - -def _parse_legacy_regex(doc: Any, dummy0: Any) -> Any: - pattern = doc["$regex"] - # Check if this is the $regex query operator. - if not isinstance(pattern, (str, bytes)): - return doc - flags = 0 - # PyMongo always adds $options but some other tools may not. 
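A quick round-trip sketch of the dumps/loads pair above; the ObjectId hex string is made up:

    from bson.json_util import dumps, loads

    doc = loads('{"_id": {"$oid": "5f2b4c3a9d1e8f0012345678"}, "n": {"$numberLong": "42"}}')
    print(type(doc["_id"]).__name__)  # ObjectId -- wrappers decode to BSON types
    print(doc["n"])                   # 42, as an Int64
    print(dumps(doc))                 # re-encodes (relaxed mode unwraps $numberLong to 42)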
- for opt in doc.get("$options", ""): - flags |= _RE_OPT_TABLE.get(opt, 0) - return Regex(pattern, flags) - - -def _parse_legacy_uuid(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: - """Decode a JSON legacy $uuid to Python UUID.""" - if len(doc) != 1: - raise TypeError(f"Bad $uuid, extra field(s): {doc}") - if not isinstance(doc["$uuid"], str): - raise TypeError(f"$uuid must be a string: {doc}") - if json_options.uuid_representation == UuidRepresentation.UNSPECIFIED: - return Binary.from_uuid(uuid.UUID(doc["$uuid"])) - else: - return uuid.UUID(doc["$uuid"]) - - -def _binary_or_uuid(data: Any, subtype: int, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: - # special handling for UUID - if subtype in ALL_UUID_SUBTYPES: - uuid_representation = json_options.uuid_representation - binary_value = Binary(data, subtype) - if uuid_representation == UuidRepresentation.UNSPECIFIED: - return binary_value - if subtype == UUID_SUBTYPE: - # Legacy behavior: use STANDARD with binary subtype 4. - uuid_representation = UuidRepresentation.STANDARD - elif uuid_representation == UuidRepresentation.STANDARD: - # subtype == OLD_UUID_SUBTYPE - # Legacy behavior: STANDARD is the same as PYTHON_LEGACY. - uuid_representation = UuidRepresentation.PYTHON_LEGACY - return binary_value.as_uuid(uuid_representation) - - if subtype == 0: - return cast(uuid.UUID, data) - return Binary(data, subtype) - - -def _parse_legacy_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: - if isinstance(doc["$type"], int): - doc["$type"] = "%02x" % doc["$type"] - subtype = int(doc["$type"], 16) - if subtype >= 0xFFFFFF80: # Handle mongoexport values - subtype = int(doc["$type"][6:], 16) - data = base64.b64decode(doc["$binary"].encode()) - return _binary_or_uuid(data, subtype, json_options) - - -def _parse_canonical_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: - binary = doc["$binary"] - b64 = binary["base64"] - subtype = binary["subType"] - if not isinstance(b64, str): - raise TypeError(f"$binary base64 must be a string: {doc}") - if not isinstance(subtype, str) or len(subtype) > 2: - raise TypeError(f"$binary subType must be a string at most 2 characters: {doc}") - if len(binary) != 2: - raise TypeError(f'$binary must include only "base64" and "subType" components: {doc}') - - data = base64.b64decode(b64.encode()) - return _binary_or_uuid(data, int(subtype, 16), json_options) - - -def _parse_canonical_datetime( - doc: Any, json_options: JSONOptions -) -> Union[datetime.datetime, DatetimeMS]: - """Decode a JSON datetime to python datetime.datetime.""" - dtm = doc["$date"] - if len(doc) != 1: - raise TypeError(f"Bad $date, extra field(s): {doc}") - # mongoexport 2.6 and newer - if isinstance(dtm, str): - try: - # Parse offset - if dtm[-1] == "Z": - dt = dtm[:-1] - offset = "Z" - elif dtm[-6] in ("+", "-") and dtm[-3] == ":": - # (+|-)HH:MM - dt = dtm[:-6] - offset = dtm[-6:] - elif dtm[-5] in ("+", "-"): - # (+|-)HHMM - dt = dtm[:-5] - offset = dtm[-5:] - elif dtm[-3] in ("+", "-"): - # (+|-)HH - dt = dtm[:-3] - offset = dtm[-3:] - else: - dt = dtm - offset = "" - except IndexError as exc: - raise ValueError(f"time data {dtm!r} does not match ISO-8601 datetime format") from exc - - # Parse the optional factional seconds portion. 
- dot_index = dt.rfind(".") - microsecond = 0 - if dot_index != -1: - microsecond = int(float(dt[dot_index:]) * 1000000) - dt = dt[:dot_index] - - aware = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S").replace( - microsecond=microsecond, tzinfo=utc - ) - - if offset and offset != "Z": - if len(offset) == 6: - hours, minutes = offset[1:].split(":") - secs = int(hours) * 3600 + int(minutes) * 60 - elif len(offset) == 5: - secs = int(offset[1:3]) * 3600 + int(offset[3:]) * 60 - elif len(offset) == 3: - secs = int(offset[1:3]) * 3600 - if offset[0] == "-": - secs *= -1 - aware = aware - datetime.timedelta(seconds=secs) - - if json_options.tz_aware: - if json_options.tzinfo: - aware = aware.astimezone(json_options.tzinfo) - if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS: - return DatetimeMS(aware) - return aware - else: - aware_tzinfo_none = aware.replace(tzinfo=None) - if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS: - return DatetimeMS(aware_tzinfo_none) - return aware_tzinfo_none - return _millis_to_datetime(int(dtm), cast("CodecOptions[Any]", json_options)) - - -def _parse_canonical_oid(doc: Any, dummy0: Any) -> ObjectId: - """Decode a JSON ObjectId to bson.objectid.ObjectId.""" - if len(doc) != 1: - raise TypeError(f"Bad $oid, extra field(s): {doc}") - return ObjectId(doc["$oid"]) - - -def _parse_canonical_symbol(doc: Any, dummy0: Any) -> str: - """Decode a JSON symbol to Python string.""" - symbol = doc["$symbol"] - if len(doc) != 1: - raise TypeError(f"Bad $symbol, extra field(s): {doc}") - return str(symbol) - - -def _parse_canonical_code(doc: Any, dummy0: Any) -> Code: - """Decode a JSON code to bson.code.Code.""" - for key in doc: - if key not in ("$code", "$scope"): - raise TypeError(f"Bad $code, extra field(s): {doc}") - return Code(doc["$code"], scope=doc.get("$scope")) - - -def _parse_canonical_regex(doc: Any, dummy0: Any) -> Regex[str]: - """Decode a JSON regex to bson.regex.Regex.""" - regex = doc["$regularExpression"] - if len(doc) != 1: - raise TypeError(f"Bad $regularExpression, extra field(s): {doc}") - if len(regex) != 2: - raise TypeError( - f'Bad $regularExpression must include only "pattern and "options" components: {doc}' - ) - opts = regex["options"] - if not isinstance(opts, str): - raise TypeError( - "Bad $regularExpression options, options must be string, was type %s" % (type(opts)) - ) - return Regex(regex["pattern"], opts) - - -def _parse_canonical_dbref(doc: Any, dummy0: Any) -> Any: - """Decode a JSON DBRef to bson.dbref.DBRef.""" - if ( - isinstance(doc.get("$ref"), str) - and "$id" in doc - and isinstance(doc.get("$db"), (str, type(None))) - ): - return DBRef(doc.pop("$ref"), doc.pop("$id"), database=doc.pop("$db", None), **doc) - return doc - - -def _parse_canonical_dbpointer(doc: Any, dummy0: Any) -> Any: - """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef.""" - dbref = doc["$dbPointer"] - if len(doc) != 1: - raise TypeError(f"Bad $dbPointer, extra field(s): {doc}") - if isinstance(dbref, DBRef): - dbref_doc = dbref.as_doc() - # DBPointer must not contain $db in its value. 
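To illustrate the offset handling in _parse_canonical_datetime above, a sketch with an assumed timestamp; with tz_aware=True the result is normalized to the configured tzinfo (UTC by default):

    from bson.json_util import JSONOptions, loads

    opts = JSONOptions(tz_aware=True)
    doc = loads('{"ts": {"$date": "2024-01-01T12:30:00.500+02:00"}}', json_options=opts)
    print(doc["ts"])  # 2024-01-01 10:30:00.500000+00:00 -- the +02:00 offset is applied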
- if dbref.database is not None: - raise TypeError(f"Bad $dbPointer, extra field $db: {dbref_doc}") - if not isinstance(dbref.id, ObjectId): - raise TypeError(f"Bad $dbPointer, $id must be an ObjectId: {dbref_doc}") - if len(dbref_doc) != 2: - raise TypeError(f"Bad $dbPointer, extra field(s) in DBRef: {dbref_doc}") - return dbref - else: - raise TypeError(f"Bad $dbPointer, expected a DBRef: {doc}") - - -def _parse_canonical_int32(doc: Any, dummy0: Any) -> int: - """Decode a JSON int32 to python int.""" - i_str = doc["$numberInt"] - if len(doc) != 1: - raise TypeError(f"Bad $numberInt, extra field(s): {doc}") - if not isinstance(i_str, str): - raise TypeError(f"$numberInt must be string: {doc}") - return int(i_str) - - -def _parse_canonical_int64(doc: Any, dummy0: Any) -> Int64: - """Decode a JSON int64 to bson.int64.Int64.""" - l_str = doc["$numberLong"] - if len(doc) != 1: - raise TypeError(f"Bad $numberLong, extra field(s): {doc}") - return Int64(l_str) - - -def _parse_canonical_double(doc: Any, dummy0: Any) -> float: - """Decode a JSON double to python float.""" - d_str = doc["$numberDouble"] - if len(doc) != 1: - raise TypeError(f"Bad $numberDouble, extra field(s): {doc}") - if not isinstance(d_str, str): - raise TypeError(f"$numberDouble must be string: {doc}") - return float(d_str) - - -def _parse_canonical_decimal128(doc: Any, dummy0: Any) -> Decimal128: - """Decode a JSON decimal128 to bson.decimal128.Decimal128.""" - d_str = doc["$numberDecimal"] - if len(doc) != 1: - raise TypeError(f"Bad $numberDecimal, extra field(s): {doc}") - if not isinstance(d_str, str): - raise TypeError(f"$numberDecimal must be string: {doc}") - return Decimal128(d_str) - - -def _parse_canonical_minkey(doc: Any, dummy0: Any) -> MinKey: - """Decode a JSON MinKey to bson.min_key.MinKey.""" - if type(doc["$minKey"]) is not int or doc["$minKey"] != 1: # noqa: E721 - raise TypeError(f"$minKey value must be 1: {doc}") - if len(doc) != 1: - raise TypeError(f"Bad $minKey, extra field(s): {doc}") - return MinKey() - - -def _parse_canonical_maxkey(doc: Any, dummy0: Any) -> MaxKey: - """Decode a JSON MaxKey to bson.max_key.MaxKey.""" - if type(doc["$maxKey"]) is not int or doc["$maxKey"] != 1: # noqa: E721 - raise TypeError("$maxKey value must be 1: %s", (doc,)) - if len(doc) != 1: - raise TypeError(f"Bad $minKey, extra field(s): {doc}") - return MaxKey() - - -def _parse_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: - if "$type" in doc: - return _parse_legacy_binary(doc, json_options) - else: - return _parse_canonical_binary(doc, json_options) - - -def _parse_timestamp(doc: Any, dummy0: Any) -> Timestamp: - tsp = doc["$timestamp"] - return Timestamp(tsp["t"], tsp["i"]) - - -_PARSERS: dict[str, Callable[[Any, JSONOptions], Any]] = { - "$oid": _parse_canonical_oid, - "$ref": _parse_canonical_dbref, - "$date": _parse_canonical_datetime, - "$regex": _parse_legacy_regex, - "$minKey": _parse_canonical_minkey, - "$maxKey": _parse_canonical_maxkey, - "$binary": _parse_binary, - "$code": _parse_canonical_code, - "$uuid": _parse_legacy_uuid, - "$undefined": lambda _, _1: None, - "$numberLong": _parse_canonical_int64, - "$timestamp": _parse_timestamp, - "$numberDecimal": _parse_canonical_decimal128, - "$dbPointer": _parse_canonical_dbpointer, - "$regularExpression": _parse_canonical_regex, - "$symbol": _parse_canonical_symbol, - "$numberInt": _parse_canonical_int32, - "$numberDouble": _parse_canonical_double, -} -_PARSERS_SET = set(_PARSERS) - - -def _encode_binary(data: bytes, subtype: int, 
json_options: JSONOptions) -> Any: - if json_options.json_mode == JSONMode.LEGACY: - return {"$binary": base64.b64encode(data).decode(), "$type": "%02x" % subtype} - return {"$binary": {"base64": base64.b64encode(data).decode(), "subType": "%02x" % subtype}} - - -def _encode_datetimems(obj: Any, json_options: JSONOptions) -> dict: - if ( - json_options.datetime_representation == DatetimeRepresentation.ISO8601 - and 0 <= int(obj) <= _MAX_UTC_MS - ): - return _encode_datetime(obj.as_datetime(), json_options) - elif json_options.datetime_representation == DatetimeRepresentation.LEGACY: - return {"$date": int(obj)} - return {"$date": {"$numberLong": str(int(obj))}} - - -def _encode_code(obj: Code, json_options: JSONOptions) -> dict: - if obj.scope is None: - return {"$code": str(obj)} - else: - return {"$code": str(obj), "$scope": _json_convert(obj.scope, json_options)} - - -def _encode_int64(obj: Int64, json_options: JSONOptions) -> Any: - if json_options.strict_number_long: - return {"$numberLong": str(obj)} - else: - return int(obj) - - -def _encode_noop(obj: Any, dummy0: Any) -> Any: - return obj - - -def _encode_regex(obj: Any, json_options: JSONOptions) -> dict: - flags = "" - if obj.flags & re.IGNORECASE: - flags += "i" - if obj.flags & re.LOCALE: - flags += "l" - if obj.flags & re.MULTILINE: - flags += "m" - if obj.flags & re.DOTALL: - flags += "s" - if obj.flags & re.UNICODE: - flags += "u" - if obj.flags & re.VERBOSE: - flags += "x" - if isinstance(obj.pattern, str): - pattern = obj.pattern - else: - pattern = obj.pattern.decode("utf-8") - if json_options.json_mode == JSONMode.LEGACY: - return {"$regex": pattern, "$options": flags} - return {"$regularExpression": {"pattern": pattern, "options": flags}} - - -def _encode_int(obj: int, json_options: JSONOptions) -> Any: - if json_options.json_mode == JSONMode.CANONICAL: - if -_INT32_MAX <= obj < _INT32_MAX: - return {"$numberInt": str(obj)} - return {"$numberLong": str(obj)} - return obj - - -def _encode_float(obj: float, json_options: JSONOptions) -> Any: - if json_options.json_mode != JSONMode.LEGACY: - if math.isnan(obj): - return {"$numberDouble": "NaN"} - elif math.isinf(obj): - representation = "Infinity" if obj > 0 else "-Infinity" - return {"$numberDouble": representation} - elif json_options.json_mode == JSONMode.CANONICAL: - # repr() will return the shortest string guaranteed to produce the - # original value, when float() is called on it. 
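A small sketch of the numeric encoders above: _encode_int switches between $numberInt and $numberLong at the 2**31 boundary, and _encode_float spells out non-finite values in the non-legacy modes:

    from bson.json_util import CANONICAL_JSON_OPTIONS, dumps

    doc = {"small": 7, "big": 2**40, "nan": float("nan")}
    print(dumps(doc, json_options=CANONICAL_JSON_OPTIONS))
    # {"small": {"$numberInt": "7"}, "big": {"$numberLong": "1099511627776"},
    #  "nan": {"$numberDouble": "NaN"}}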
- return {"$numberDouble": str(repr(obj))} - return obj - - -def _encode_datetime(obj: datetime.datetime, json_options: JSONOptions) -> dict: - if json_options.datetime_representation == DatetimeRepresentation.ISO8601: - if not obj.tzinfo: - obj = obj.replace(tzinfo=utc) - assert obj.tzinfo is not None - if obj >= EPOCH_AWARE: - off = obj.tzinfo.utcoffset(obj) - if (off.days, off.seconds, off.microseconds) == (0, 0, 0): # type: ignore - tz_string = "Z" - else: - tz_string = obj.strftime("%z") - millis = int(obj.microsecond / 1000) - fracsecs = ".%03d" % (millis,) if millis else "" - return { - "$date": "{}{}{}".format(obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string) - } - - millis = _datetime_to_millis(obj) - if json_options.datetime_representation == DatetimeRepresentation.LEGACY: - return {"$date": millis} - return {"$date": {"$numberLong": str(millis)}} - - -def _encode_bytes(obj: bytes, json_options: JSONOptions) -> dict: - return _encode_binary(obj, 0, json_options) - - -def _encode_binary_obj(obj: Binary, json_options: JSONOptions) -> dict: - return _encode_binary(obj, obj.subtype, json_options) - - -def _encode_uuid(obj: uuid.UUID, json_options: JSONOptions) -> dict: - if json_options.strict_uuid: - binval = Binary.from_uuid(obj, uuid_representation=json_options.uuid_representation) - return _encode_binary(binval, binval.subtype, json_options) - else: - return {"$uuid": obj.hex} - - -def _encode_objectid(obj: ObjectId, dummy0: Any) -> dict: - return {"$oid": str(obj)} - - -def _encode_timestamp(obj: Timestamp, dummy0: Any) -> dict: - return {"$timestamp": {"t": obj.time, "i": obj.inc}} - - -def _encode_decimal128(obj: Timestamp, dummy0: Any) -> dict: - return {"$numberDecimal": str(obj)} - - -def _encode_dbref(obj: DBRef, json_options: JSONOptions) -> dict: - return _json_convert(obj.as_doc(), json_options=json_options) - - -def _encode_minkey(dummy0: Any, dummy1: Any) -> dict: - return {"$minKey": 1} - - -def _encode_maxkey(dummy0: Any, dummy1: Any) -> dict: - return {"$maxKey": 1} - - -# Encoders for BSON types -# Each encoder function's signature is: -# - obj: a Python data type, e.g. a Python int for _encode_int -# - json_options: a JSONOptions -_ENCODERS: dict[Type, Callable[[Any, JSONOptions], Any]] = { - bool: _encode_noop, - bytes: _encode_bytes, - datetime.datetime: _encode_datetime, - DatetimeMS: _encode_datetimems, - float: _encode_float, - int: _encode_int, - str: _encode_noop, - type(None): _encode_noop, - uuid.UUID: _encode_uuid, - Binary: _encode_binary_obj, - Int64: _encode_int64, - Code: _encode_code, - DBRef: _encode_dbref, - MaxKey: _encode_maxkey, - MinKey: _encode_minkey, - ObjectId: _encode_objectid, - Regex: _encode_regex, - RE_TYPE: _encode_regex, - Timestamp: _encode_timestamp, - Decimal128: _encode_decimal128, -} - -# Map each _type_marker to its encoder for faster lookup. -_MARKERS: dict[int, Callable[[Any, JSONOptions], Any]] = {} -for _typ in _ENCODERS: - if hasattr(_typ, "_type_marker"): - _MARKERS[_typ._type_marker] = _ENCODERS[_typ] - -_BUILT_IN_TYPES = tuple(t for t in _ENCODERS) - - -def default(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any: - # First see if the type is already cached. KeyError will only ever - # happen once per subtype. - try: - return _ENCODERS[type(obj)](obj, json_options) - except KeyError: - pass - - # Second, fall back to trying _type_marker. This has to be done - # before the loop below since users could subclass one of our - # custom types that subclasses a python built-in (e.g. 
Binary) - if hasattr(obj, "_type_marker"): - marker = obj._type_marker - if marker in _MARKERS: - func = _MARKERS[marker] - # Cache this type for faster subsequent lookup. - _ENCODERS[type(obj)] = func - return func(obj, json_options) - - # Third, test each base type. This will only happen once for - # a subtype of a supported base type. - for base in _BUILT_IN_TYPES: - if isinstance(obj, base): - func = _ENCODERS[base] - # Cache this type for faster subsequent lookup. - _ENCODERS[type(obj)] = func - return func(obj, json_options) - - raise TypeError("%r is not JSON serializable" % obj) - - -def _get_str_size(obj: Any) -> int: - return len(obj) - - -def _get_datetime_size(obj: datetime.datetime) -> int: - return 5 + len(str(obj.time())) - - -def _get_regex_size(obj: Regex) -> int: - return 18 + len(obj.pattern) - - -def _get_dbref_size(obj: DBRef) -> int: - return 34 + len(obj.collection) - - -_CONSTANT_SIZE_TABLE: dict[Any, int] = { - ObjectId: 28, - int: 11, - Int64: 11, - Decimal128: 11, - Timestamp: 14, - MinKey: 8, - MaxKey: 8, -} - -_VARIABLE_SIZE_TABLE: dict[Any, Callable[[Any], int]] = { - str: _get_str_size, - bytes: _get_str_size, - datetime.datetime: _get_datetime_size, - Regex: _get_regex_size, - DBRef: _get_dbref_size, -} - - -def get_size(obj: Any, max_size: int, current_size: int = 0) -> int: - """Recursively finds size of objects""" - if current_size >= max_size: - return current_size - - obj_type = type(obj) - - # Check to see if the obj has a constant size estimate - try: - return _CONSTANT_SIZE_TABLE[obj_type] - except KeyError: - pass - - # Check to see if the obj has a variable but simple size estimate - try: - return _VARIABLE_SIZE_TABLE[obj_type](obj) - except KeyError: - pass - - # Special cases that require recursion - if obj_type == Code: - if obj.scope: - current_size += ( - 5 + get_size(obj.scope, max_size, current_size) + len(obj) - len(obj.scope) - ) - else: - current_size += 5 + len(obj) - elif obj_type == dict: - for k, v in obj.items(): - current_size += get_size(k, max_size, current_size) - current_size += get_size(v, max_size, current_size) - if current_size >= max_size: - return current_size - elif hasattr(obj, "__iter__"): - for i in obj: - current_size += get_size(i, max_size, current_size) - if current_size >= max_size: - return current_size - return current_size - - -def _truncate_documents(obj: Any, max_length: int) -> Tuple[Any, int]: - """Recursively truncate documents as needed to fit inside max_length characters.""" - if max_length <= 0: - return None, 0 - remaining = max_length - if hasattr(obj, "items"): - truncated: Any = {} - for k, v in obj.items(): - truncated_v, remaining = _truncate_documents(v, remaining) - if truncated_v: - truncated[k] = truncated_v - if remaining <= 0: - break - return truncated, remaining - elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)): - truncated: Any = [] # type:ignore[no-redef] - for v in obj: - truncated_v, remaining = _truncate_documents(v, remaining) - if truncated_v: - truncated.append(truncated_v) - if remaining <= 0: - break - return truncated, remaining - else: - return _truncate(obj, remaining) - - -def _truncate(obj: Any, remaining: int) -> Tuple[Any, int]: - size = get_size(obj, remaining) - - if size <= remaining: - return obj, remaining - size - else: - try: - truncated = obj[:remaining] - except TypeError: - truncated = obj - return truncated, remaining - size diff --git a/venv/lib/python3.12/site-packages/bson/max_key.py b/venv/lib/python3.12/site-packages/bson/max_key.py 
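As the module docstring above suggests, default() can be handed straight to json.dumps when a document holds BSON types but no Binary or Code; the ObjectId hex is made up:

    import json
    from bson.json_util import default
    from bson.objectid import ObjectId

    doc = {"_id": ObjectId("5f2b4c3a9d1e8f0012345678")}
    print(json.dumps(doc, default=default))
    # {"_id": {"$oid": "5f2b4c3a9d1e8f0012345678"}}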
deleted file mode 100644 index 445e12f5..00000000 --- a/venv/lib/python3.12/site-packages/bson/max_key.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2010-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Representation for the MongoDB internal MaxKey type.""" -from __future__ import annotations - -from typing import Any - - -class MaxKey: - """MongoDB internal MaxKey type.""" - - __slots__ = () - - _type_marker = 127 - - def __getstate__(self) -> Any: - return {} - - def __setstate__(self, state: Any) -> None: - pass - - def __eq__(self, other: Any) -> bool: - return isinstance(other, MaxKey) - - def __hash__(self) -> int: - return hash(self._type_marker) - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __le__(self, other: Any) -> bool: - return isinstance(other, MaxKey) - - def __lt__(self, dummy: Any) -> bool: - return False - - def __ge__(self, dummy: Any) -> bool: - return True - - def __gt__(self, other: Any) -> bool: - return not isinstance(other, MaxKey) - - def __repr__(self) -> str: - return "MaxKey()" diff --git a/venv/lib/python3.12/site-packages/bson/min_key.py b/venv/lib/python3.12/site-packages/bson/min_key.py deleted file mode 100644 index 37828dcf..00000000 --- a/venv/lib/python3.12/site-packages/bson/min_key.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2010-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
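A sketch of the comparison semantics implemented by the MaxKey class above and the MinKey class below:

    from bson.max_key import MaxKey
    from bson.min_key import MinKey

    print(MaxKey() > 10**100)   # True -- greater than everything ...
    print(MaxKey() > MaxKey())  # False -- ... except another MaxKey
    print(MinKey() < MaxKey())  # True -- MinKey is the mirror image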
- -"""Representation for the MongoDB internal MinKey type.""" -from __future__ import annotations - -from typing import Any - - -class MinKey: - """MongoDB internal MinKey type.""" - - __slots__ = () - - _type_marker = 255 - - def __getstate__(self) -> Any: - return {} - - def __setstate__(self, state: Any) -> None: - pass - - def __eq__(self, other: Any) -> bool: - return isinstance(other, MinKey) - - def __hash__(self) -> int: - return hash(self._type_marker) - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __le__(self, dummy: Any) -> bool: - return True - - def __lt__(self, other: Any) -> bool: - return not isinstance(other, MinKey) - - def __ge__(self, other: Any) -> bool: - return isinstance(other, MinKey) - - def __gt__(self, dummy: Any) -> bool: - return False - - def __repr__(self) -> str: - return "MinKey()" diff --git a/venv/lib/python3.12/site-packages/bson/objectid.py b/venv/lib/python3.12/site-packages/bson/objectid.py deleted file mode 100644 index 970c4e52..00000000 --- a/venv/lib/python3.12/site-packages/bson/objectid.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright 2009-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for working with MongoDB ObjectIds.""" -from __future__ import annotations - -import binascii -import datetime -import os -import struct -import threading -import time -from random import SystemRandom -from typing import Any, NoReturn, Optional, Type, Union - -from bson.datetime_ms import _datetime_to_millis -from bson.errors import InvalidId -from bson.tz_util import utc - -_MAX_COUNTER_VALUE = 0xFFFFFF -_PACK_INT = struct.Struct(">I").pack -_PACK_INT_RANDOM = struct.Struct(">I5s").pack -_UNPACK_INT = struct.Struct(">I").unpack - - -def _raise_invalid_id(oid: str) -> NoReturn: - raise InvalidId( - "%r is not a valid ObjectId, it must be a 12-byte input" - " or a 24-character hex string" % oid - ) - - -def _random_bytes() -> bytes: - """Get the 5-byte random field of an ObjectId.""" - return os.urandom(5) - - -class ObjectId: - """A MongoDB ObjectId.""" - - _pid = os.getpid() - - _inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE) - _inc_lock = threading.Lock() - - __random = _random_bytes() - - __slots__ = ("__id",) - - _type_marker = 7 - - def __init__(self, oid: Optional[Union[str, ObjectId, bytes]] = None) -> None: - """Initialize a new ObjectId. - - An ObjectId is a 12-byte unique identifier consisting of: - - - a 4-byte value representing the seconds since the Unix epoch, - - a 5-byte random value, - - a 3-byte counter, starting with a random value. - - By default, ``ObjectId()`` creates a new unique identifier. The - optional parameter `oid` can be an :class:`ObjectId`, or any 12 - :class:`bytes`. 
- - For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId - specification but they are acceptable input:: - - >>> ObjectId(b'foo-bar-quux') - ObjectId('666f6f2d6261722d71757578') - - `oid` can also be a :class:`str` of 24 hex digits:: - - >>> ObjectId('0123456789ab0123456789ab') - ObjectId('0123456789ab0123456789ab') - - Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor - 24 hex digits, or :class:`TypeError` if `oid` is not an accepted type. - - :param oid: a valid ObjectId. - - .. seealso:: The MongoDB documentation on `ObjectIds `_. - - .. versionchanged:: 3.8 - :class:`~bson.objectid.ObjectId` now implements the `ObjectID - specification version 0.2 - `_. - """ - if oid is None: - self.__generate() - elif isinstance(oid, bytes) and len(oid) == 12: - self.__id = oid - else: - self.__validate(oid) - - @classmethod - def from_datetime(cls: Type[ObjectId], generation_time: datetime.datetime) -> ObjectId: - """Create a dummy ObjectId instance with a specific generation time. - - This method is useful for doing range queries on a field - containing :class:`ObjectId` instances. - - .. warning:: - It is not safe to insert a document containing an ObjectId - generated using this method. This method deliberately - eliminates the uniqueness guarantee that ObjectIds - generally provide. ObjectIds generated with this method - should be used exclusively in queries. - - `generation_time` will be converted to UTC. Naive datetime - instances will be treated as though they already contain UTC. - - An example using this helper to get documents where ``"_id"`` - was generated before January 1, 2010 would be: - - >>> gen_time = datetime.datetime(2010, 1, 1) - >>> dummy_id = ObjectId.from_datetime(gen_time) - >>> result = collection.find({"_id": {"$lt": dummy_id}}) - - :param generation_time: :class:`~datetime.datetime` to be used - as the generation time for the resulting ObjectId. - """ - oid = ( - _PACK_INT(_datetime_to_millis(generation_time) // 1000) - + b"\x00\x00\x00\x00\x00\x00\x00\x00" - ) - return cls(oid) - - @classmethod - def is_valid(cls: Type[ObjectId], oid: Any) -> bool: - """Checks if a `oid` string is valid or not. - - :param oid: the object id to validate - - .. versionadded:: 2.3 - """ - if not oid: - return False - - try: - ObjectId(oid) - return True - except (InvalidId, TypeError): - return False - - @classmethod - def _random(cls) -> bytes: - """Generate a 5-byte random number once per process.""" - pid = os.getpid() - if pid != cls._pid: - cls._pid = pid - cls.__random = _random_bytes() - return cls.__random - - def __generate(self) -> None: - """Generate a new value for this ObjectId.""" - with ObjectId._inc_lock: - inc = ObjectId._inc - ObjectId._inc = (inc + 1) % (_MAX_COUNTER_VALUE + 1) - - # 4 bytes current time, 5 bytes random, 3 bytes inc. - self.__id = _PACK_INT_RANDOM(int(time.time()), ObjectId._random()) + _PACK_INT(inc)[1:4] - - def __validate(self, oid: Any) -> None: - """Validate and use the given id for this ObjectId. - - Raises TypeError if id is not an instance of :class:`str`, - :class:`bytes`, or ObjectId. Raises InvalidId if it is not a - valid ObjectId. 
- - :param oid: a valid ObjectId - """ - if isinstance(oid, ObjectId): - self.__id = oid.binary - elif isinstance(oid, str): - if len(oid) == 24: - try: - self.__id = bytes.fromhex(oid) - except (TypeError, ValueError): - _raise_invalid_id(oid) - else: - _raise_invalid_id(oid) - else: - raise TypeError(f"id must be an instance of (bytes, str, ObjectId), not {type(oid)}") - - @property - def binary(self) -> bytes: - """12-byte binary representation of this ObjectId.""" - return self.__id - - @property - def generation_time(self) -> datetime.datetime: - """A :class:`datetime.datetime` instance representing the time of - generation for this :class:`ObjectId`. - - The :class:`datetime.datetime` is timezone aware, and - represents the generation time in UTC. It is precise to the - second. - """ - timestamp = _UNPACK_INT(self.__id[0:4])[0] - return datetime.datetime.fromtimestamp(timestamp, utc) - - def __getstate__(self) -> bytes: - """Return value of object for pickling. - needed explicitly because __slots__() defined. - """ - return self.__id - - def __setstate__(self, value: Any) -> None: - """Explicit state set from pickling""" - # Provide backwards compatibility with OIDs - # pickled with pymongo-1.9 or older. - if isinstance(value, dict): - oid = value["_ObjectId__id"] - else: - oid = value - # ObjectIds pickled in python 2.x used `str` for __id. - # In python 3.x this has to be converted to `bytes` - # by encoding latin-1. - if isinstance(oid, str): - self.__id = oid.encode("latin-1") - else: - self.__id = oid - - def __str__(self) -> str: - return binascii.hexlify(self.__id).decode() - - def __repr__(self) -> str: - return f"ObjectId('{self!s}')" - - def __eq__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id == other.binary - return NotImplemented - - def __ne__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id != other.binary - return NotImplemented - - def __lt__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id < other.binary - return NotImplemented - - def __le__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id <= other.binary - return NotImplemented - - def __gt__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id > other.binary - return NotImplemented - - def __ge__(self, other: Any) -> bool: - if isinstance(other, ObjectId): - return self.__id >= other.binary - return NotImplemented - - def __hash__(self) -> int: - """Get a hash value for this :class:`ObjectId`.""" - return hash(self.__id) diff --git a/venv/lib/python3.12/site-packages/bson/py.typed b/venv/lib/python3.12/site-packages/bson/py.typed deleted file mode 100644 index 0f405706..00000000 --- a/venv/lib/python3.12/site-packages/bson/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# PEP-561 Support File. -# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing". diff --git a/venv/lib/python3.12/site-packages/bson/raw_bson.py b/venv/lib/python3.12/site-packages/bson/raw_bson.py deleted file mode 100644 index 2ce53143..00000000 --- a/venv/lib/python3.12/site-packages/bson/raw_bson.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright 2015-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for representing raw BSON documents. - -Inserting and Retrieving RawBSONDocuments -========================================= - -Example: Moving a document between different databases/collections - -.. doctest:: - - >>> import bson - >>> from pymongo import MongoClient - >>> from bson.raw_bson import RawBSONDocument - >>> client = MongoClient(document_class=RawBSONDocument) - >>> client.drop_database("db") - >>> client.drop_database("replica_db") - >>> db = client.db - >>> result = db.test.insert_many( - ... [{"_id": 1, "a": 1}, {"_id": 2, "b": 1}, {"_id": 3, "c": 1}, {"_id": 4, "d": 1}] - ... ) - >>> replica_db = client.replica_db - >>> for doc in db.test.find(): - ... print(f"raw document: {doc.raw}") - ... print(f"decoded document: {bson.decode(doc.raw)}") - ... result = replica_db.test.insert_one(doc) - ... - raw document: b'...' - decoded document: {'_id': 1, 'a': 1} - raw document: b'...' - decoded document: {'_id': 2, 'b': 1} - raw document: b'...' - decoded document: {'_id': 3, 'c': 1} - raw document: b'...' - decoded document: {'_id': 4, 'd': 1} - -For use cases like moving documents across different databases or writing binary -blobs to disk, using raw BSON documents provides better speed and avoids the -overhead of decoding or encoding BSON. -""" -from __future__ import annotations - -from typing import Any, ItemsView, Iterator, Mapping, Optional - -from bson import _get_object_size, _raw_to_dict -from bson.codec_options import _RAW_BSON_DOCUMENT_MARKER, CodecOptions -from bson.codec_options import DEFAULT_CODEC_OPTIONS as DEFAULT - - -def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument], raw_array: bool = False -) -> dict[str, Any]: - """Inflates the top level fields of a BSON document. - - :param bson_bytes: the BSON bytes that compose this document - :param codec_options: An instance of - :class:`~bson.codec_options.CodecOptions` whose ``document_class`` - must be :class:`RawBSONDocument`. - """ - return _raw_to_dict(bson_bytes, 4, len(bson_bytes) - 1, codec_options, {}, raw_array=raw_array) - - -class RawBSONDocument(Mapping[str, Any]): - """Representation for a MongoDB document that provides access to the raw - BSON bytes that compose it. - - Only when a field is accessed or modified within the document does - RawBSONDocument decode its bytes. - """ - - __slots__ = ("__raw", "__inflated_doc", "__codec_options") - _type_marker = _RAW_BSON_DOCUMENT_MARKER - __codec_options: CodecOptions[RawBSONDocument] - - def __init__( - self, bson_bytes: bytes, codec_options: Optional[CodecOptions[RawBSONDocument]] = None - ) -> None: - """Create a new :class:`RawBSONDocument` - - :class:`RawBSONDocument` is a representation of a BSON document that - provides access to the underlying raw BSON bytes. Only when a field is - accessed or modified within the document does RawBSONDocument decode - its bytes. 
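[A short usage sketch consistent with the lazy-decoding behaviour described in the module docstring below; assumes pymongo's bson package is installed::

    import bson
    from bson.raw_bson import RawBSONDocument

    raw_doc = RawBSONDocument(bson.encode({"x": 1, "y": [1, 2]}))
    raw_doc.raw          # the untouched BSON bytes; nothing decoded yet
    raw_doc["x"]         # first field access lazily inflates the document
    dict(raw_doc)        # read-only Mapping interface, usable like a dict
]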
- - :class:`RawBSONDocument` implements the ``Mapping`` abstract base - class from the standard library so it can be used like a read-only - ``dict``:: - - >>> from bson import encode - >>> raw_doc = RawBSONDocument(encode({'_id': 'my_doc'})) - >>> raw_doc.raw - b'...' - >>> raw_doc['_id'] - 'my_doc' - - :param bson_bytes: the BSON bytes that compose this document - :param codec_options: An instance of - :class:`~bson.codec_options.CodecOptions` whose ``document_class`` - must be :class:`RawBSONDocument`. The default is - :attr:`DEFAULT_RAW_BSON_OPTIONS`. - - .. versionchanged:: 3.8 - :class:`RawBSONDocument` now validates that the ``bson_bytes`` - passed in represent a single bson document. - - .. versionchanged:: 3.5 - If a :class:`~bson.codec_options.CodecOptions` is passed in, its - `document_class` must be :class:`RawBSONDocument`. - """ - self.__raw = bson_bytes - self.__inflated_doc: Optional[Mapping[str, Any]] = None - # Can't default codec_options to DEFAULT_RAW_BSON_OPTIONS in signature, - # it refers to this class RawBSONDocument. - if codec_options is None: - codec_options = DEFAULT_RAW_BSON_OPTIONS - elif not issubclass(codec_options.document_class, RawBSONDocument): - raise TypeError( - "RawBSONDocument cannot use CodecOptions with document " - f"class {codec_options.document_class}" - ) - self.__codec_options = codec_options - # Validate the bson object size. - _get_object_size(bson_bytes, 0, len(bson_bytes)) - - @property - def raw(self) -> bytes: - """The raw BSON bytes composing this document.""" - return self.__raw - - def items(self) -> ItemsView[str, Any]: - """Lazily decode and iterate elements in this document.""" - return self.__inflated.items() - - @property - def __inflated(self) -> Mapping[str, Any]: - if self.__inflated_doc is None: - # We already validated the object's size when this document was - # created, so no need to do that again. - self.__inflated_doc = self._inflate_bson(self.__raw, self.__codec_options) - return self.__inflated_doc - - @staticmethod - def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument] - ) -> Mapping[str, Any]: - return _inflate_bson(bson_bytes, codec_options) - - def __getitem__(self, item: str) -> Any: - return self.__inflated[item] - - def __iter__(self) -> Iterator[str]: - return iter(self.__inflated) - - def __len__(self) -> int: - return len(self.__inflated) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, RawBSONDocument): - return self.__raw == other.raw - return NotImplemented - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.raw!r}, codec_options={self.__codec_options!r})" - - -class _RawArrayBSONDocument(RawBSONDocument): - """A RawBSONDocument that only expands sub-documents and arrays when accessed.""" - - @staticmethod - def _inflate_bson( - bson_bytes: bytes, codec_options: CodecOptions[RawBSONDocument] - ) -> Mapping[str, Any]: - return _inflate_bson(bson_bytes, codec_options, raw_array=True) - - -DEFAULT_RAW_BSON_OPTIONS: CodecOptions[RawBSONDocument] = DEFAULT.with_options( - document_class=RawBSONDocument -) -_RAW_ARRAY_BSON_OPTIONS: CodecOptions[_RawArrayBSONDocument] = DEFAULT.with_options( - document_class=_RawArrayBSONDocument -) -"""The default :class:`~bson.codec_options.CodecOptions` for -:class:`RawBSONDocument`. 
-""" diff --git a/venv/lib/python3.12/site-packages/bson/regex.py b/venv/lib/python3.12/site-packages/bson/regex.py deleted file mode 100644 index 60cff4fd..00000000 --- a/venv/lib/python3.12/site-packages/bson/regex.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2013-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for representing MongoDB regular expressions.""" -from __future__ import annotations - -import re -from typing import Any, Generic, Pattern, Type, TypeVar, Union - -from bson._helpers import _getstate_slots, _setstate_slots -from bson.son import RE_TYPE - - -def str_flags_to_int(str_flags: str) -> int: - flags = 0 - if "i" in str_flags: - flags |= re.IGNORECASE - if "l" in str_flags: - flags |= re.LOCALE - if "m" in str_flags: - flags |= re.MULTILINE - if "s" in str_flags: - flags |= re.DOTALL - if "u" in str_flags: - flags |= re.UNICODE - if "x" in str_flags: - flags |= re.VERBOSE - - return flags - - -_T = TypeVar("_T", str, bytes) - - -class Regex(Generic[_T]): - """BSON regular expression data.""" - - __slots__ = ("pattern", "flags") - - __getstate__ = _getstate_slots - __setstate__ = _setstate_slots - - _type_marker = 11 - - @classmethod - def from_native(cls: Type[Regex[Any]], regex: Pattern[_T]) -> Regex[_T]: - """Convert a Python regular expression into a ``Regex`` instance. - - Note that in Python 3, a regular expression compiled from a - :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable - to store this flag in a BSON regular expression, unset it first:: - - >>> pattern = re.compile('.*') - >>> regex = Regex.from_native(pattern) - >>> regex.flags ^= re.UNICODE - >>> db.collection.insert_one({'pattern': regex}) - - :param regex: A regular expression object from ``re.compile()``. - - .. warning:: - Python regular expressions use a different syntax and different - set of flags than MongoDB, which uses `PCRE`_. A regular - expression retrieved from the server may not compile in - Python, or may match a different set of strings in Python than - when used in a MongoDB query. - - .. _PCRE: http://www.pcre.org/ - """ - if not isinstance(regex, RE_TYPE): - raise TypeError("regex must be a compiled regular expression, not %s" % type(regex)) - - return Regex(regex.pattern, regex.flags) - - def __init__(self, pattern: _T, flags: Union[str, int] = 0) -> None: - """BSON regular expression data. - - This class is useful to store and retrieve regular expressions that are - incompatible with Python's regular expression dialect. 
- - :param pattern: string - :param flags: an integer bitmask, or a string of flag - characters like "im" for IGNORECASE and MULTILINE - """ - if not isinstance(pattern, (str, bytes)): - raise TypeError("pattern must be a string, not %s" % type(pattern)) - self.pattern: _T = pattern - - if isinstance(flags, str): - self.flags = str_flags_to_int(flags) - elif isinstance(flags, int): - self.flags = flags - else: - raise TypeError("flags must be a string or int, not %s" % type(flags)) - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Regex): - return self.pattern == other.pattern and self.flags == other.flags - else: - return NotImplemented - - __hash__ = None # type: ignore - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __repr__(self) -> str: - return f"Regex({self.pattern!r}, {self.flags!r})" - - def try_compile(self) -> Pattern[_T]: - """Compile this :class:`Regex` as a Python regular expression. - - .. warning:: - Python regular expressions use a different syntax and different - set of flags than MongoDB, which uses `PCRE`_. A regular - expression retrieved from the server may not compile in - Python, or may match a different set of strings in Python than - when used in a MongoDB query. :meth:`try_compile()` may raise - :exc:`re.error`. - - .. _PCRE: http://www.pcre.org/ - """ - return re.compile(self.pattern, self.flags) diff --git a/venv/lib/python3.12/site-packages/bson/son.py b/venv/lib/python3.12/site-packages/bson/son.py deleted file mode 100644 index 24275fce..00000000 --- a/venv/lib/python3.12/site-packages/bson/son.py +++ /dev/null @@ -1,211 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tools for creating and manipulating SON, the Serialized Ocument Notation. - -Regular dictionaries can be used instead of SON objects, but not when the order -of keys is important. A SON object can be used just like a normal Python -dictionary. -""" -from __future__ import annotations - -import copy -import re -from collections.abc import Mapping as _Mapping -from typing import ( - Any, - Dict, - Iterable, - Iterator, - Mapping, - Optional, - Pattern, - Tuple, - Type, - TypeVar, - Union, - cast, -) - -# This sort of sucks, but seems to be as good as it gets... -# This is essentially the same as re._pattern_type -RE_TYPE: Type[Pattern[Any]] = type(re.compile("")) - -_Key = TypeVar("_Key") -_Value = TypeVar("_Value") -_T = TypeVar("_T") - - -class SON(Dict[_Key, _Value]): - """SON data. - - A subclass of dict that maintains ordering of keys and provides a - few extra niceties for dealing with SON. SON provides an API - similar to collections.OrderedDict. 
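[An illustrative comparison, not from the module, of the order-sensitive equality SON provides (see also __eq__ below)::

    from bson.son import SON

    a = SON([("x", 1), ("y", 2)])
    b = SON([("y", 2), ("x", 1)])
    a == b                       # False: SON-to-SON comparison is order-sensitive
    a == {"y": 2, "x": 1}        # True: comparison to a plain dict ignores order
]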
- """ - - __keys: list[Any] - - def __init__( - self, - data: Optional[Union[Mapping[_Key, _Value], Iterable[Tuple[_Key, _Value]]]] = None, - **kwargs: Any, - ) -> None: - self.__keys = [] - dict.__init__(self) - self.update(data) - self.update(kwargs) - - def __new__(cls: Type[SON[_Key, _Value]], *args: Any, **kwargs: Any) -> SON[_Key, _Value]: - instance = super().__new__(cls, *args, **kwargs) - instance.__keys = [] - return instance - - def __repr__(self) -> str: - result = [] - for key in self.__keys: - result.append(f"({key!r}, {self[key]!r})") - return "SON([%s])" % ", ".join(result) - - def __setitem__(self, key: _Key, value: _Value) -> None: - if key not in self.__keys: - self.__keys.append(key) - dict.__setitem__(self, key, value) - - def __delitem__(self, key: _Key) -> None: - self.__keys.remove(key) - dict.__delitem__(self, key) - - def copy(self) -> SON[_Key, _Value]: - other: SON[_Key, _Value] = SON() - other.update(self) - return other - - # TODO this is all from UserDict.DictMixin. it could probably be made more - # efficient. - # second level definitions support higher levels - def __iter__(self) -> Iterator[_Key]: - yield from self.__keys - - def has_key(self, key: _Key) -> bool: - return key in self.__keys - - def iterkeys(self) -> Iterator[_Key]: - return self.__iter__() - - # fourth level uses definitions from lower levels - def itervalues(self) -> Iterator[_Value]: - for _, v in self.items(): - yield v - - def values(self) -> list[_Value]: # type: ignore[override] - return [v for _, v in self.items()] - - def clear(self) -> None: - self.__keys = [] - super().clear() - - def setdefault(self, key: _Key, default: _Value) -> _Value: - try: - return self[key] - except KeyError: - self[key] = default - return default - - def pop(self, key: _Key, *args: Union[_Value, _T]) -> Union[_Value, _T]: - if len(args) > 1: - raise TypeError("pop expected at most 2 arguments, got " + repr(1 + len(args))) - try: - value = self[key] - except KeyError: - if args: - return args[0] - raise - del self[key] - return value - - def popitem(self) -> Tuple[_Key, _Value]: - try: - k, v = next(iter(self.items())) - except StopIteration: - raise KeyError("container is empty") from None - del self[k] - return (k, v) - - def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None: # type: ignore[override] - # Make progressively weaker assumptions about "other" - if other is None: - pass - elif hasattr(other, "items"): - for k, v in other.items(): - self[k] = v - elif hasattr(other, "keys"): - for k in other: - self[k] = other[k] - else: - for k, v in other: - self[k] = v - if kwargs: - self.update(kwargs) - - def get( # type: ignore[override] - self, key: _Key, default: Optional[Union[_Value, _T]] = None - ) -> Union[_Value, _T, None]: - try: - return self[key] - except KeyError: - return default - - def __eq__(self, other: Any) -> bool: - """Comparison to another SON is order-sensitive while comparison to a - regular dictionary is order-insensitive. - """ - if isinstance(other, SON): - return len(self) == len(other) and list(self.items()) == list(other.items()) - return cast(bool, self.to_dict() == other) - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __len__(self) -> int: - return len(self.__keys) - - def to_dict(self) -> dict[_Key, _Value]: - """Convert a SON document to a normal Python dictionary instance. - - This is trickier than just *dict(...)* because it needs to be - recursive. 
- """ - - def transform_value(value: Any) -> Any: - if isinstance(value, list): - return [transform_value(v) for v in value] - elif isinstance(value, _Mapping): - return {k: transform_value(v) for k, v in value.items()} - else: - return value - - return cast("dict[_Key, _Value]", transform_value(dict(self))) - - def __deepcopy__(self, memo: dict[int, SON[_Key, _Value]]) -> SON[_Key, _Value]: - out: SON[_Key, _Value] = SON() - val_id = id(self) - if val_id in memo: - return memo[val_id] - memo[val_id] = out - for k, v in self.items(): - if not isinstance(v, RE_TYPE): - v = copy.deepcopy(v, memo) # noqa: PLW2901 - out[k] = v - return out diff --git a/venv/lib/python3.12/site-packages/bson/time64.c b/venv/lib/python3.12/site-packages/bson/time64.c deleted file mode 100644 index a21fbb90..00000000 --- a/venv/lib/python3.12/site-packages/bson/time64.c +++ /dev/null @@ -1,781 +0,0 @@ -/* - -Copyright (c) 2007-2010 Michael G Schwern - -This software originally derived from Paul Sheer's pivotal_gmtime_r.c. - -The MIT License: - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - -*/ - -/* - -Programmers who have available to them 64-bit time values as a 'long -long' type can use cbson_localtime64_r() and cbson_gmtime64_r() which correctly -converts the time even on 32-bit systems. Whether you have 64-bit time -values will depend on the operating system. - -cbson_localtime64_r() is a 64-bit equivalent of localtime_r(). - -cbson_gmtime64_r() is a 64-bit equivalent of gmtime_r(). - -*/ - -#ifdef _MSC_VER - #define _CRT_SECURE_NO_WARNINGS -#endif - -/* Including Python.h fixes issues with interpreters built with -std=c99. */ -#define PY_SSIZE_T_CLEAN -#include "Python.h" - -#include -#include "time64.h" -#include "time64_limits.h" - - -/* Spec says except for stftime() and the _r() functions, these - all return static memory. Stabbings! 
*/ -static struct TM Static_Return_Date; - -static const int days_in_month[2][12] = { - {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, - {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, -}; - -static const int julian_days_by_month[2][12] = { - {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}, - {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335}, -}; - -static const int length_of_year[2] = { 365, 366 }; - -/* Some numbers relating to the gregorian cycle */ -static const Year years_in_gregorian_cycle = 400; -#define days_in_gregorian_cycle ((365 * 400) + 100 - 4 + 1) -static const Time64_T seconds_in_gregorian_cycle = days_in_gregorian_cycle * 60LL * 60LL * 24LL; - -/* Year range we can trust the time functions with */ -#define MAX_SAFE_YEAR 2037 -#define MIN_SAFE_YEAR 1971 - -/* 28 year Julian calendar cycle */ -#define SOLAR_CYCLE_LENGTH 28 - -/* Year cycle from MAX_SAFE_YEAR down. */ -static const int safe_years_high[SOLAR_CYCLE_LENGTH] = { - 2016, 2017, 2018, 2019, - 2020, 2021, 2022, 2023, - 2024, 2025, 2026, 2027, - 2028, 2029, 2030, 2031, - 2032, 2033, 2034, 2035, - 2036, 2037, 2010, 2011, - 2012, 2013, 2014, 2015 -}; - -/* Year cycle from MIN_SAFE_YEAR up */ -static const int safe_years_low[SOLAR_CYCLE_LENGTH] = { - 1996, 1997, 1998, 1971, - 1972, 1973, 1974, 1975, - 1976, 1977, 1978, 1979, - 1980, 1981, 1982, 1983, - 1984, 1985, 1986, 1987, - 1988, 1989, 1990, 1991, - 1992, 1993, 1994, 1995, -}; - -/* Let's assume people are going to be looking for dates in the future. - Let's provide some cheats so you can skip ahead. - This has a 4x speed boost when near 2008. -*/ -/* Number of days since epoch on Jan 1st, 2008 GMT */ -#define CHEAT_DAYS (1199145600 / 24 / 60 / 60) -#define CHEAT_YEARS 108 - -#define IS_LEAP(n) ((!(((n) + 1900) % 400) || (!(((n) + 1900) % 4) && (((n) + 1900) % 100))) != 0) -#define _TIME64_WRAP(a,b,m) ((a) = ((a) < 0 ) ? ((b)--, (a) + (m)) : (a)) - -#ifdef USE_SYSTEM_LOCALTIME -# define SHOULD_USE_SYSTEM_LOCALTIME(a) ( \ - (a) <= SYSTEM_LOCALTIME_MAX && \ - (a) >= SYSTEM_LOCALTIME_MIN \ -) -#else -# define SHOULD_USE_SYSTEM_LOCALTIME(a) (0) -#endif - -#ifdef USE_SYSTEM_GMTIME -# define SHOULD_USE_SYSTEM_GMTIME(a) ( \ - (a) <= SYSTEM_GMTIME_MAX && \ - (a) >= SYSTEM_GMTIME_MIN \ -) -#else -# define SHOULD_USE_SYSTEM_GMTIME(a) (0) -#endif - -/* Multi varadic macros are a C99 thing, alas */ -#ifdef TIME_64_DEBUG -# define TIME64_TRACE(format) (fprintf(stderr, format)) -# define TIME64_TRACE1(format, var1) (fprintf(stderr, format, var1)) -# define TIME64_TRACE2(format, var1, var2) (fprintf(stderr, format, var1, var2)) -# define TIME64_TRACE3(format, var1, var2, var3) (fprintf(stderr, format, var1, var2, var3)) -#else -# define TIME64_TRACE(format) ((void)0) -# define TIME64_TRACE1(format, var1) ((void)0) -# define TIME64_TRACE2(format, var1, var2) ((void)0) -# define TIME64_TRACE3(format, var1, var2, var3) ((void)0) -#endif - - -static int is_exception_century(Year year) -{ - int is_exception = ((year % 100 == 0) && !(year % 400 == 0)); - TIME64_TRACE1("# is_exception_century: %s\n", is_exception ? "yes" : "no"); - - return(is_exception); -} - - -/* Compare two dates. - The result is like cmp. 
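[For context on why these 64-bit replacements exist, an aside in Python for brevity: a signed 32-bit time_t overflows after 2**31 - 1 seconds past the epoch, i.e. early 2038::

    import datetime

    limit = 2**31 - 1            # last second a signed 32-bit time_t can hold
    datetime.datetime.fromtimestamp(limit, datetime.timezone.utc)
    # -> datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=datetime.timezone.utc)
]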
- Ignores things like gmtoffset and dst -*/ -int cbson_cmp_date( const struct TM* left, const struct tm* right ) { - if( left->tm_year > right->tm_year ) - return 1; - else if( left->tm_year < right->tm_year ) - return -1; - - if( left->tm_mon > right->tm_mon ) - return 1; - else if( left->tm_mon < right->tm_mon ) - return -1; - - if( left->tm_mday > right->tm_mday ) - return 1; - else if( left->tm_mday < right->tm_mday ) - return -1; - - if( left->tm_hour > right->tm_hour ) - return 1; - else if( left->tm_hour < right->tm_hour ) - return -1; - - if( left->tm_min > right->tm_min ) - return 1; - else if( left->tm_min < right->tm_min ) - return -1; - - if( left->tm_sec > right->tm_sec ) - return 1; - else if( left->tm_sec < right->tm_sec ) - return -1; - - return 0; -} - - -/* Check if a date is safely inside a range. - The intention is to check if its a few days inside. -*/ -int cbson_date_in_safe_range( const struct TM* date, const struct tm* min, const struct tm* max ) { - if( cbson_cmp_date(date, min) == -1 ) - return 0; - - if( cbson_cmp_date(date, max) == 1 ) - return 0; - - return 1; -} - - -/* timegm() is not in the C or POSIX spec, but it is such a useful - extension I would be remiss in leaving it out. Also I need it - for cbson_localtime64() -*/ -Time64_T cbson_timegm64(const struct TM *date) { - Time64_T days = 0; - Time64_T seconds = 0; - Year year; - Year orig_year = (Year)date->tm_year; - int cycles = 0; - - if( orig_year > 100 ) { - cycles = (int)((orig_year - 100) / 400); - orig_year -= cycles * 400; - days += (Time64_T)cycles * days_in_gregorian_cycle; - } - else if( orig_year < -300 ) { - cycles = (int)((orig_year - 100) / 400); - orig_year -= cycles * 400; - days += (Time64_T)cycles * days_in_gregorian_cycle; - } - TIME64_TRACE3("# timegm/ cycles: %d, days: %lld, orig_year: %lld\n", cycles, days, orig_year); - - if( orig_year > 70 ) { - year = 70; - while( year < orig_year ) { - days += length_of_year[IS_LEAP(year)]; - year++; - } - } - else if ( orig_year < 70 ) { - year = 69; - do { - days -= length_of_year[IS_LEAP(year)]; - year--; - } while( year >= orig_year ); - } - - days += julian_days_by_month[IS_LEAP(orig_year)][date->tm_mon]; - days += date->tm_mday - 1; - - seconds = days * 60 * 60 * 24; - - seconds += date->tm_hour * 60 * 60; - seconds += date->tm_min * 60; - seconds += date->tm_sec; - - return(seconds); -} - - -#ifndef NDEBUG -static int check_tm(struct TM *tm) -{ - /* Don't forget leap seconds */ - assert(tm->tm_sec >= 0); - assert(tm->tm_sec <= 61); - - assert(tm->tm_min >= 0); - assert(tm->tm_min <= 59); - - assert(tm->tm_hour >= 0); - assert(tm->tm_hour <= 23); - - assert(tm->tm_mday >= 1); - assert(tm->tm_mday <= days_in_month[IS_LEAP(tm->tm_year)][tm->tm_mon]); - - assert(tm->tm_mon >= 0); - assert(tm->tm_mon <= 11); - - assert(tm->tm_wday >= 0); - assert(tm->tm_wday <= 6); - - assert(tm->tm_yday >= 0); - assert(tm->tm_yday <= length_of_year[IS_LEAP(tm->tm_year)]); - -#ifdef HAS_TM_TM_GMTOFF - assert(tm->tm_gmtoff >= -24 * 60 * 60); - assert(tm->tm_gmtoff <= 24 * 60 * 60); -#endif - - return 1; -} -#endif - - -/* The exceptional centuries without leap years cause the cycle to - shift by 16 -*/ -static Year cycle_offset(Year year) -{ - const Year start_year = 2000; - Year year_diff = year - start_year; - Year exceptions; - - if( year > start_year ) - year_diff--; - - exceptions = year_diff / 100; - exceptions -= year_diff / 400; - - TIME64_TRACE3("# year: %lld, exceptions: %lld, year_diff: %lld\n", - year, exceptions, year_diff); - - return exceptions * 
16; -} - -/* For a given year after 2038, pick the latest possible matching - year in the 28 year calendar cycle. - - A matching year... - 1) Starts on the same day of the week. - 2) Has the same leap year status. - - This is so the calendars match up. - - Also the previous year must match. When doing Jan 1st you might - wind up on Dec 31st the previous year when doing a -UTC time zone. - - Finally, the next year must have the same start day of week. This - is for Dec 31st with a +UTC time zone. - It doesn't need the same leap year status since we only care about - January 1st. -*/ -static int safe_year(const Year year) -{ - int safe_year = 0; - Year year_cycle; - - if( year >= MIN_SAFE_YEAR && year <= MAX_SAFE_YEAR ) { - return (int)year; - } - - year_cycle = year + cycle_offset(year); - - /* safe_years_low is off from safe_years_high by 8 years */ - if( year < MIN_SAFE_YEAR ) - year_cycle -= 8; - - /* Change non-leap xx00 years to an equivalent */ - if( is_exception_century(year) ) - year_cycle += 11; - - /* Also xx01 years, since the previous year will be wrong */ - if( is_exception_century(year - 1) ) - year_cycle += 17; - - year_cycle %= SOLAR_CYCLE_LENGTH; - if( year_cycle < 0 ) - year_cycle = SOLAR_CYCLE_LENGTH + year_cycle; - - assert( year_cycle >= 0 ); - assert( year_cycle < SOLAR_CYCLE_LENGTH ); - if( year < MIN_SAFE_YEAR ) - safe_year = safe_years_low[year_cycle]; - else if( year > MAX_SAFE_YEAR ) - safe_year = safe_years_high[year_cycle]; - else - assert(0); - - TIME64_TRACE3("# year: %lld, year_cycle: %lld, safe_year: %d\n", - year, year_cycle, safe_year); - - assert(safe_year <= MAX_SAFE_YEAR && safe_year >= MIN_SAFE_YEAR); - - return safe_year; -} - - -void pymongo_copy_tm_to_TM64(const struct tm *src, struct TM *dest) { - if( src == NULL ) { - memset(dest, 0, sizeof(*dest)); - } - else { -# ifdef USE_TM64 - dest->tm_sec = src->tm_sec; - dest->tm_min = src->tm_min; - dest->tm_hour = src->tm_hour; - dest->tm_mday = src->tm_mday; - dest->tm_mon = src->tm_mon; - dest->tm_year = (Year)src->tm_year; - dest->tm_wday = src->tm_wday; - dest->tm_yday = src->tm_yday; - dest->tm_isdst = src->tm_isdst; - -# ifdef HAS_TM_TM_GMTOFF - dest->tm_gmtoff = src->tm_gmtoff; -# endif - -# ifdef HAS_TM_TM_ZONE - dest->tm_zone = src->tm_zone; -# endif - -# else - /* They're the same type */ - memcpy(dest, src, sizeof(*dest)); -# endif - } -} - - -void cbson_copy_TM64_to_tm(const struct TM *src, struct tm *dest) { - if( src == NULL ) { - memset(dest, 0, sizeof(*dest)); - } - else { -# ifdef USE_TM64 - dest->tm_sec = src->tm_sec; - dest->tm_min = src->tm_min; - dest->tm_hour = src->tm_hour; - dest->tm_mday = src->tm_mday; - dest->tm_mon = src->tm_mon; - dest->tm_year = (int)src->tm_year; - dest->tm_wday = src->tm_wday; - dest->tm_yday = src->tm_yday; - dest->tm_isdst = src->tm_isdst; - -# ifdef HAS_TM_TM_GMTOFF - dest->tm_gmtoff = src->tm_gmtoff; -# endif - -# ifdef HAS_TM_TM_ZONE - dest->tm_zone = src->tm_zone; -# endif - -# else - /* They're the same type */ - memcpy(dest, src, sizeof(*dest)); -# endif - } -} - - -/* Simulate localtime_r() to the best of our ability */ -struct tm * cbson_fake_localtime_r(const time_t *time, struct tm *result) { - const struct tm *static_result = localtime(time); - - assert(result != NULL); - - if( static_result == NULL ) { - memset(result, 0, sizeof(*result)); - return NULL; - } - else { - memcpy(result, static_result, sizeof(*result)); - return result; - } -} - - -/* Simulate gmtime_r() to the best of our ability */ -struct tm * cbson_fake_gmtime_r(const time_t 
*time, struct tm *result) { - const struct tm *static_result = gmtime(time); - - assert(result != NULL); - - if( static_result == NULL ) { - memset(result, 0, sizeof(*result)); - return NULL; - } - else { - memcpy(result, static_result, sizeof(*result)); - return result; - } -} - - -static Time64_T seconds_between_years(Year left_year, Year right_year) { - int increment = (left_year > right_year) ? 1 : -1; - Time64_T seconds = 0; - int cycles; - - if( left_year > 2400 ) { - cycles = (int)((left_year - 2400) / 400); - left_year -= cycles * 400; - seconds += cycles * seconds_in_gregorian_cycle; - } - else if( left_year < 1600 ) { - cycles = (int)((left_year - 1600) / 400); - left_year += cycles * 400; - seconds += cycles * seconds_in_gregorian_cycle; - } - - while( left_year != right_year ) { - seconds += length_of_year[IS_LEAP(right_year - 1900)] * 60 * 60 * 24; - right_year += increment; - } - - return seconds * increment; -} - - -Time64_T cbson_mktime64(const struct TM *input_date) { - struct tm safe_date; - struct TM date; - Time64_T time; - Year year = input_date->tm_year + 1900; - - if( cbson_date_in_safe_range(input_date, &SYSTEM_MKTIME_MIN, &SYSTEM_MKTIME_MAX) ) - { - cbson_copy_TM64_to_tm(input_date, &safe_date); - return (Time64_T)mktime(&safe_date); - } - - /* Have to make the year safe in date else it won't fit in safe_date */ - date = *input_date; - date.tm_year = safe_year(year) - 1900; - cbson_copy_TM64_to_tm(&date, &safe_date); - - time = (Time64_T)mktime(&safe_date); - - time += seconds_between_years(year, (Year)(safe_date.tm_year + 1900)); - - return time; -} - - -/* Because I think mktime() is a crappy name */ -Time64_T timelocal64(const struct TM *date) { - return cbson_mktime64(date); -} - - -struct TM *cbson_gmtime64_r (const Time64_T *in_time, struct TM *p) -{ - int v_tm_sec, v_tm_min, v_tm_hour, v_tm_mon, v_tm_wday; - Time64_T v_tm_tday; - int leap; - Time64_T m; - Time64_T time = *in_time; - Year year = 70; - int cycles = 0; - - assert(p != NULL); - -#ifdef USE_SYSTEM_GMTIME - /* Use the system gmtime() if time_t is small enough */ - if( SHOULD_USE_SYSTEM_GMTIME(*in_time) ) { - time_t safe_time = (time_t)*in_time; - struct tm safe_date; - GMTIME_R(&safe_time, &safe_date); - - pymongo_copy_tm_to_TM64(&safe_date, p); - assert(check_tm(p)); - - return p; - } -#endif - -#ifdef HAS_TM_TM_GMTOFF - p->tm_gmtoff = 0; -#endif - p->tm_isdst = 0; - -#ifdef HAS_TM_TM_ZONE - p->tm_zone = "UTC"; -#endif - - v_tm_sec = (int)(time % 60); - time /= 60; - v_tm_min = (int)(time % 60); - time /= 60; - v_tm_hour = (int)(time % 24); - time /= 24; - v_tm_tday = time; - - _TIME64_WRAP (v_tm_sec, v_tm_min, 60); - _TIME64_WRAP (v_tm_min, v_tm_hour, 60); - _TIME64_WRAP (v_tm_hour, v_tm_tday, 24); - - v_tm_wday = (int)((v_tm_tday + 4) % 7); - if (v_tm_wday < 0) - v_tm_wday += 7; - m = v_tm_tday; - - if (m >= CHEAT_DAYS) { - year = CHEAT_YEARS; - m -= CHEAT_DAYS; - } - - if (m >= 0) { - /* Gregorian cycles, this is huge optimization for distant times */ - cycles = (int)(m / (Time64_T) days_in_gregorian_cycle); - if( cycles ) { - m -= (cycles * (Time64_T) days_in_gregorian_cycle); - year += (cycles * years_in_gregorian_cycle); - } - - /* Years */ - leap = IS_LEAP (year); - while (m >= (Time64_T) length_of_year[leap]) { - m -= (Time64_T) length_of_year[leap]; - year++; - leap = IS_LEAP (year); - } - - /* Months */ - v_tm_mon = 0; - while (m >= (Time64_T) days_in_month[leap][v_tm_mon]) { - m -= (Time64_T) days_in_month[leap][v_tm_mon]; - v_tm_mon++; - } - } else { - year--; - - /* Gregorian cycles 
*/ - cycles = (int)((m / (Time64_T) days_in_gregorian_cycle) + 1); - if( cycles ) { - m -= (cycles * (Time64_T) days_in_gregorian_cycle); - year += (cycles * years_in_gregorian_cycle); - } - - /* Years */ - leap = IS_LEAP (year); - while (m < (Time64_T) -length_of_year[leap]) { - m += (Time64_T) length_of_year[leap]; - year--; - leap = IS_LEAP (year); - } - - /* Months */ - v_tm_mon = 11; - while (m < (Time64_T) -days_in_month[leap][v_tm_mon]) { - m += (Time64_T) days_in_month[leap][v_tm_mon]; - v_tm_mon--; - } - m += (Time64_T) days_in_month[leap][v_tm_mon]; - } - - p->tm_year = (int)year; - if( p->tm_year != year ) { -#ifdef EOVERFLOW - errno = EOVERFLOW; -#endif - return NULL; - } - - /* At this point m is less than a year so casting to an int is safe */ - p->tm_mday = (int) m + 1; - p->tm_yday = julian_days_by_month[leap][v_tm_mon] + (int)m; - p->tm_sec = v_tm_sec; - p->tm_min = v_tm_min; - p->tm_hour = v_tm_hour; - p->tm_mon = v_tm_mon; - p->tm_wday = v_tm_wday; - - assert(check_tm(p)); - - return p; -} - - -struct TM *cbson_localtime64_r (const Time64_T *time, struct TM *local_tm) -{ - time_t safe_time; - struct tm safe_date; - struct TM gm_tm; - Year orig_year; - int month_diff; - - assert(local_tm != NULL); - -#ifdef USE_SYSTEM_LOCALTIME - /* Use the system localtime() if time_t is small enough */ - if( SHOULD_USE_SYSTEM_LOCALTIME(*time) ) { - safe_time = (time_t)*time; - - TIME64_TRACE1("Using system localtime for %lld\n", *time); - - LOCALTIME_R(&safe_time, &safe_date); - - pymongo_copy_tm_to_TM64(&safe_date, local_tm); - assert(check_tm(local_tm)); - - return local_tm; - } -#endif - - if( cbson_gmtime64_r(time, &gm_tm) == NULL ) { - TIME64_TRACE1("cbson_gmtime64_r returned null for %lld\n", *time); - return NULL; - } - - orig_year = gm_tm.tm_year; - - if (gm_tm.tm_year > (2037 - 1900) || - gm_tm.tm_year < (1970 - 1900) - ) - { - TIME64_TRACE1("Mapping tm_year %lld to safe_year\n", (Year)gm_tm.tm_year); - gm_tm.tm_year = safe_year((Year)(gm_tm.tm_year + 1900)) - 1900; - } - - safe_time = (time_t)cbson_timegm64(&gm_tm); - if( LOCALTIME_R(&safe_time, &safe_date) == NULL ) { - TIME64_TRACE1("localtime_r(%d) returned NULL\n", (int)safe_time); - return NULL; - } - - pymongo_copy_tm_to_TM64(&safe_date, local_tm); - - local_tm->tm_year = (int)orig_year; - if( local_tm->tm_year != orig_year ) { - TIME64_TRACE2("tm_year overflow: tm_year %lld, orig_year %lld\n", - (Year)local_tm->tm_year, (Year)orig_year); - -#ifdef EOVERFLOW - errno = EOVERFLOW; -#endif - return NULL; - } - - - month_diff = local_tm->tm_mon - gm_tm.tm_mon; - - /* When localtime is Dec 31st previous year and - gmtime is Jan 1st next year. - */ - if( month_diff == 11 ) { - local_tm->tm_year--; - } - - /* When localtime is Jan 1st, next year and - gmtime is Dec 31st, previous year. - */ - if( month_diff == -11 ) { - local_tm->tm_year++; - } - - /* GMT is Jan 1st, xx01 year, but localtime is still Dec 31st - in a non-leap xx00. There is one point in the cycle - we can't account for which the safe xx00 year is a leap - year. So we need to correct for Dec 31st coming out as - the 366th day of the year. 
- */ - if( !IS_LEAP(local_tm->tm_year) && local_tm->tm_yday == 365 ) - local_tm->tm_yday--; - - assert(check_tm(local_tm)); - - return local_tm; -} - - -int cbson_valid_tm_wday( const struct TM* date ) { - if( 0 <= date->tm_wday && date->tm_wday <= 6 ) - return 1; - else - return 0; -} - -int cbson_valid_tm_mon( const struct TM* date ) { - if( 0 <= date->tm_mon && date->tm_mon <= 11 ) - return 1; - else - return 0; -} - - -/* Non-thread safe versions of the above */ -struct TM *cbson_localtime64(const Time64_T *time) { -#ifdef _MSC_VER - _tzset(); -#else - tzset(); -#endif - return cbson_localtime64_r(time, &Static_Return_Date); -} - -struct TM *cbson_gmtime64(const Time64_T *time) { - return cbson_gmtime64_r(time, &Static_Return_Date); -} diff --git a/venv/lib/python3.12/site-packages/bson/time64.h b/venv/lib/python3.12/site-packages/bson/time64.h deleted file mode 100644 index 6321eb30..00000000 --- a/venv/lib/python3.12/site-packages/bson/time64.h +++ /dev/null @@ -1,67 +0,0 @@ -#ifndef TIME64_H -# define TIME64_H - -#include -#include "time64_config.h" - -/* Set our custom types */ -typedef INT_64_T Int64; -typedef Int64 Time64_T; -typedef Int64 Year; - - -/* A copy of the tm struct but with a 64 bit year */ -struct TM64 { - int tm_sec; - int tm_min; - int tm_hour; - int tm_mday; - int tm_mon; - Year tm_year; - int tm_wday; - int tm_yday; - int tm_isdst; - -#ifdef HAS_TM_TM_GMTOFF - long tm_gmtoff; -#endif - -#ifdef HAS_TM_TM_ZONE - char *tm_zone; -#endif -}; - - -/* Decide which tm struct to use */ -#ifdef USE_TM64 -#define TM TM64 -#else -#define TM tm -#endif - - -/* Declare public functions */ -struct TM *cbson_gmtime64_r (const Time64_T *, struct TM *); -struct TM *cbson_localtime64_r (const Time64_T *, struct TM *); -struct TM *cbson_gmtime64 (const Time64_T *); -struct TM *cbson_localtime64 (const Time64_T *); - -Time64_T cbson_timegm64 (const struct TM *); -Time64_T cbson_mktime64 (const struct TM *); -Time64_T timelocal64 (const struct TM *); - - -/* Not everyone has gm/localtime_r(), provide a replacement */ -#ifdef HAS_LOCALTIME_R -# define LOCALTIME_R(clock, result) localtime_r(clock, result) -#else -# define LOCALTIME_R(clock, result) cbson_fake_localtime_r(clock, result) -#endif -#ifdef HAS_GMTIME_R -# define GMTIME_R(clock, result) gmtime_r(clock, result) -#else -# define GMTIME_R(clock, result) cbson_fake_gmtime_r(clock, result) -#endif - - -#endif diff --git a/venv/lib/python3.12/site-packages/bson/time64_config.h b/venv/lib/python3.12/site-packages/bson/time64_config.h deleted file mode 100644 index 9d4c111c..00000000 --- a/venv/lib/python3.12/site-packages/bson/time64_config.h +++ /dev/null @@ -1,78 +0,0 @@ -/* Configuration - ------------- - Define as appropriate for your system. - Sensible defaults provided. -*/ - - -#ifndef TIME64_CONFIG_H -# define TIME64_CONFIG_H - -/* Debugging - TIME_64_DEBUG - Define if you want debugging messages -*/ -/* #define TIME_64_DEBUG */ - - -/* INT_64_T - A 64 bit integer type to use to store time and others. - Must be defined. -*/ -#define INT_64_T long long - - -/* USE_TM64 - Should we use a 64 bit safe replacement for tm? This will - let you go past year 2 billion but the struct will be incompatible - with tm. Conversion functions will be provided. -*/ -/* #define USE_TM64 */ - - -/* Availability of system functions. - - HAS_GMTIME_R - Define if your system has gmtime_r() - - HAS_LOCALTIME_R - Define if your system has localtime_r() - - HAS_TIMEGM - Define if your system has timegm(), a GNU extension. 
-*/ -#if !defined(WIN32) && !defined(_MSC_VER) -#define HAS_GMTIME_R -#define HAS_LOCALTIME_R -#endif -/* #define HAS_TIMEGM */ - - -/* Details of non-standard tm struct elements. - - HAS_TM_TM_GMTOFF - True if your tm struct has a "tm_gmtoff" element. - A BSD extension. - - HAS_TM_TM_ZONE - True if your tm struct has a "tm_zone" element. - A BSD extension. -*/ -/* #define HAS_TM_TM_GMTOFF */ -/* #define HAS_TM_TM_ZONE */ - - -/* USE_SYSTEM_LOCALTIME - USE_SYSTEM_GMTIME - USE_SYSTEM_MKTIME - USE_SYSTEM_TIMEGM - Should we use the system functions if the time is inside their range? - Your system localtime() is probably more accurate, but our gmtime() is - fast and safe. -*/ -#define USE_SYSTEM_LOCALTIME -/* #define USE_SYSTEM_GMTIME */ -#define USE_SYSTEM_MKTIME -/* #define USE_SYSTEM_TIMEGM */ - -#endif /* TIME64_CONFIG_H */ diff --git a/venv/lib/python3.12/site-packages/bson/time64_limits.h b/venv/lib/python3.12/site-packages/bson/time64_limits.h deleted file mode 100644 index 1d30607b..00000000 --- a/venv/lib/python3.12/site-packages/bson/time64_limits.h +++ /dev/null @@ -1,95 +0,0 @@ -/* - Maximum and minimum inputs your system's respective time functions - can correctly handle. time64.h will use your system functions if - the input falls inside these ranges and corresponding USE_SYSTEM_* - constant is defined. -*/ - -#ifndef TIME64_LIMITS_H -#define TIME64_LIMITS_H - -/* Max/min for localtime() */ -#define SYSTEM_LOCALTIME_MAX 2147483647 -#define SYSTEM_LOCALTIME_MIN -2147483647-1 - -/* Max/min for gmtime() */ -#define SYSTEM_GMTIME_MAX 2147483647 -#define SYSTEM_GMTIME_MIN -2147483647-1 - -/* Max/min for mktime() */ -static const struct tm SYSTEM_MKTIME_MAX = { - 7, - 14, - 19, - 18, - 0, - 138, - 1, - 17, - 0 -#ifdef HAS_TM_TM_GMTOFF - ,-28800 -#endif -#ifdef HAS_TM_TM_ZONE - ,"PST" -#endif -}; - -static const struct tm SYSTEM_MKTIME_MIN = { - 52, - 45, - 12, - 13, - 11, - 1, - 5, - 346, - 0 -#ifdef HAS_TM_TM_GMTOFF - ,-28800 -#endif -#ifdef HAS_TM_TM_ZONE - ,"PST" -#endif -}; - -/* Max/min for timegm() */ -#ifdef HAS_TIMEGM -static const struct tm SYSTEM_TIMEGM_MAX = { - 7, - 14, - 3, - 19, - 0, - 138, - 2, - 18, - 0 - #ifdef HAS_TM_TM_GMTOFF - ,0 - #endif - #ifdef HAS_TM_TM_ZONE - ,"UTC" - #endif -}; - -static const struct tm SYSTEM_TIMEGM_MIN = { - 52, - 45, - 20, - 13, - 11, - 1, - 5, - 346, - 0 - #ifdef HAS_TM_TM_GMTOFF - ,0 - #endif - #ifdef HAS_TM_TM_ZONE - ,"UTC" - #endif -}; -#endif /* HAS_TIMEGM */ - -#endif /* TIME64_LIMITS_H */ diff --git a/venv/lib/python3.12/site-packages/bson/timestamp.py b/venv/lib/python3.12/site-packages/bson/timestamp.py deleted file mode 100644 index 949bd7b3..00000000 --- a/venv/lib/python3.12/site-packages/bson/timestamp.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2010-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Tools for representing MongoDB internal Timestamps.""" -from __future__ import annotations - -import calendar -import datetime -from typing import Any, Union - -from bson._helpers import _getstate_slots, _setstate_slots -from bson.tz_util import utc - -UPPERBOUND = 4294967296 - - -class Timestamp: - """MongoDB internal timestamps used in the opLog.""" - - __slots__ = ("__time", "__inc") - - __getstate__ = _getstate_slots - __setstate__ = _setstate_slots - - _type_marker = 17 - - def __init__(self, time: Union[datetime.datetime, int], inc: int) -> None: - """Create a new :class:`Timestamp`. - - This class is only for use with the MongoDB opLog. If you need - to store a regular timestamp, please use a - :class:`~datetime.datetime`. - - Raises :class:`TypeError` if `time` is not an instance of - :class: `int` or :class:`~datetime.datetime`, or `inc` is not - an instance of :class:`int`. Raises :class:`ValueError` if - `time` or `inc` is not in [0, 2**32). - - :param time: time in seconds since epoch UTC, or a naive UTC - :class:`~datetime.datetime`, or an aware - :class:`~datetime.datetime` - :param inc: the incrementing counter - """ - if isinstance(time, datetime.datetime): - offset = time.utcoffset() - if offset is not None: - time = time - offset - time = int(calendar.timegm(time.timetuple())) - if not isinstance(time, int): - raise TypeError(f"time must be an instance of int, not {type(time)}") - if not isinstance(inc, int): - raise TypeError(f"inc must be an instance of int, not {type(inc)}") - if not 0 <= time < UPPERBOUND: - raise ValueError("time must be contained in [0, 2**32)") - if not 0 <= inc < UPPERBOUND: - raise ValueError("inc must be contained in [0, 2**32)") - - self.__time = time - self.__inc = inc - - @property - def time(self) -> int: - """Get the time portion of this :class:`Timestamp`.""" - return self.__time - - @property - def inc(self) -> int: - """Get the inc portion of this :class:`Timestamp`.""" - return self.__inc - - def __eq__(self, other: Any) -> bool: - if isinstance(other, Timestamp): - return self.__time == other.time and self.__inc == other.inc - else: - return NotImplemented - - def __hash__(self) -> int: - return hash(self.time) ^ hash(self.inc) - - def __ne__(self, other: Any) -> bool: - return not self == other - - def __lt__(self, other: Any) -> bool: - if isinstance(other, Timestamp): - return (self.time, self.inc) < (other.time, other.inc) - return NotImplemented - - def __le__(self, other: Any) -> bool: - if isinstance(other, Timestamp): - return (self.time, self.inc) <= (other.time, other.inc) - return NotImplemented - - def __gt__(self, other: Any) -> bool: - if isinstance(other, Timestamp): - return (self.time, self.inc) > (other.time, other.inc) - return NotImplemented - - def __ge__(self, other: Any) -> bool: - if isinstance(other, Timestamp): - return (self.time, self.inc) >= (other.time, other.inc) - return NotImplemented - - def __repr__(self) -> str: - return f"Timestamp({self.__time}, {self.__inc})" - - def as_datetime(self) -> datetime.datetime: - """Return a :class:`~datetime.datetime` instance corresponding - to the time portion of this :class:`Timestamp`. - - The returned datetime's timezone is UTC. 
- """ - return datetime.datetime.fromtimestamp(self.__time, utc) diff --git a/venv/lib/python3.12/site-packages/bson/typings.py b/venv/lib/python3.12/site-packages/bson/typings.py deleted file mode 100644 index b80c6614..00000000 --- a/venv/lib/python3.12/site-packages/bson/typings.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2023-Present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Type aliases used by bson""" -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, TypeVar, Union - -if TYPE_CHECKING: - from array import array - from mmap import mmap - - from bson.raw_bson import RawBSONDocument - - -# Common Shared Types. -_DocumentOut = Union[MutableMapping[str, Any], "RawBSONDocument"] -_DocumentType = TypeVar("_DocumentType", bound=Mapping[str, Any]) -_DocumentTypeArg = TypeVar("_DocumentTypeArg", bound=Mapping[str, Any]) -_ReadableBuffer = Union[bytes, memoryview, "mmap", "array"] diff --git a/venv/lib/python3.12/site-packages/bson/tz_util.py b/venv/lib/python3.12/site-packages/bson/tz_util.py deleted file mode 100644 index 4d31c04f..00000000 --- a/venv/lib/python3.12/site-packages/bson/tz_util.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2010-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Timezone related utilities for BSON.""" -from __future__ import annotations - -from datetime import datetime, timedelta, tzinfo -from typing import Optional, Tuple, Union - -ZERO: timedelta = timedelta(0) - - -class FixedOffset(tzinfo): - """Fixed offset timezone, in minutes east from UTC. - - Implementation based from the Python `standard library documentation - `_. - Defining __getinitargs__ enables pickling / copying. 
- """ - - def __init__(self, offset: Union[float, timedelta], name: str) -> None: - if isinstance(offset, timedelta): - self.__offset = offset - else: - self.__offset = timedelta(minutes=offset) - self.__name = name - - def __getinitargs__(self) -> Tuple[timedelta, str]: - return self.__offset, self.__name - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.__offset!r}, {self.__name!r})" - - def utcoffset(self, dt: Optional[datetime]) -> timedelta: - return self.__offset - - def tzname(self, dt: Optional[datetime]) -> str: - return self.__name - - def dst(self, dt: Optional[datetime]) -> timedelta: - return ZERO - - -utc: FixedOffset = FixedOffset(0, "UTC") -"""Fixed offset timezone representing UTC.""" diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/METADATA b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/METADATA deleted file mode 100644 index a2dece83..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/METADATA +++ /dev/null @@ -1,77 +0,0 @@ -Metadata-Version: 2.4 -Name: certifi -Version: 2025.7.14 -Summary: Python package for providing Mozilla's CA Bundle. -Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.7 -License-File: LICENSE -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: license -Dynamic: license-file -Dynamic: project-url -Dynamic: requires-python -Dynamic: summary - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. 
_`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust. diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/RECORD b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/RECORD deleted file mode 100644 index 53636b1a..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -certifi-2025.7.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2025.7.14.dist-info/METADATA,sha256=7cmDKO0a-fVcP_WCfZ4DpLKgDMu_AEAYhTT5lswQToY,2423 -certifi-2025.7.14.dist-info/RECORD,, -certifi-2025.7.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 -certifi-2025.7.14.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 -certifi-2025.7.14.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=dvNDUdAXp-hzoYcf09PXzLmHIlPfypaBQPFp5esDXyo,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-312.pyc,, -certifi/__pycache__/__main__.cpython-312.pyc,, -certifi/__pycache__/core.cpython-312.pyc,, -certifi/cacert.pem,sha256=lm3cYJxEv9SoAx4Atc3NiT1D92d965vcID6H39f_93o,290057 -certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/WHEEL b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/WHEEL deleted file mode 100644 index e7fa31b6..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.9.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE deleted file mode 100644 index 62b076cd..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/licenses/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. 
- -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/top_level.txt deleted file mode 100644 index 963eac53..00000000 --- a/venv/lib/python3.12/site-packages/certifi-2025.7.14.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/venv/lib/python3.12/site-packages/certifi/__init__.py b/venv/lib/python3.12/site-packages/certifi/__init__.py deleted file mode 100644 index e8370493..00000000 --- a/venv/lib/python3.12/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2025.07.14" diff --git a/venv/lib/python3.12/site-packages/certifi/__main__.py b/venv/lib/python3.12/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5da..00000000 --- a/venv/lib/python3.12/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 52817508..00000000 Binary files a/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index b8856441..00000000 Binary files a/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc deleted file mode 100644 index 684c4776..00000000 Binary files a/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/certifi/cacert.pem b/venv/lib/python3.12/site-packages/certifi/cacert.pem deleted file mode 100644 index 64c05d7f..00000000 --- a/venv/lib/python3.12/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4778 +0,0 @@ - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud 
-EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s 
-zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR 
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd 
-MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 
3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END 
CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
-# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - 
-# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- 
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS -sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L -6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 -WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: "Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV -UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
-gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB -RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
-d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk -PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Label: "GLOBALTRUST 2020" -# Serial: 109160994242082918454945253 -# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 -# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 -# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a ------BEGIN CERTIFICATE----- -MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG -A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw -FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx -MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u -aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b -RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z -YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 -QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw -yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ -BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ -SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH -r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 -4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me -dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw -q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 -nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu -H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA -VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC -XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd -6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf -+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi -kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 -wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB -TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C -MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn -4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I -aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy -qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- 
-MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 1977337328857672817 -# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 -# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe -# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 -MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc -tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd -IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j -b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC -AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw -ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m -iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF -Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ -hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P -Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE -EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV -1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t -CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR -5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw -f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 -ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK -GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV ------END CERTIFICATE----- - -# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
-# Label: "vTrus ECC Root CA" -# Serial: 630369271402956006249506845124680065938238527194 -# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 -# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 -# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 ------BEGIN CERTIFICATE----- -MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw -RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY -BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz -MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u -LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 -v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd -e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw -V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA -AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG -GJTO ------END CERTIFICATE----- - -# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Label: "vTrus Root CA" -# Serial: 387574501246983434957692974888460947164905180485 -# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc -# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 -# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 ------BEGIN CERTIFICATE----- -MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL -BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x -FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx -MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s -THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc -IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU -AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ -GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 -8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH -flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt -J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim -0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN -pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ -UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW -OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB -AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet -8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd -nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j -bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM -Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv -TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS -S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr -I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 -b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB -UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P -Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven -sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= ------END CERTIFICATE----- - -# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group -# Subject: CN=ISRG Root X2 O=Internet Security Research Group -# Label: "ISRG Root X2" -# Serial: 87493402998870891108772069816698636114 -# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 -# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af -# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- - -# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Label: "HiPKI Root CA - G1" -# Serial: 60966262342023497858655262305426234976 -# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 -# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 -# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc ------BEGIN CERTIFICATE----- -MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa -Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 -YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw -qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv -Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 -lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz -Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ -KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK -FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj -HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr -y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ -/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM -a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 -fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG -SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi -7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc -SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza -ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc -XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg -iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho -L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF -Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
-kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ -vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU -YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 159662223612894884239637590694 -# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc -# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 -# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 ------BEGIN CERTIFICATE----- -MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD -VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw -MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g -UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT -BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx -uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV -HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ -+wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 -bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 159662320309726417404178440727 -# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 -# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a -# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo -27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w -Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw -TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl -qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH -szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 -Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk -MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 -wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p -aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN -VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb -C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe -QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy -h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 -7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J -ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef -MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
-Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT -6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ -0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm -2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb -bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 159662449406622349769042896298 -# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc -# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 -# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt -nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY -6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu -MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k -RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg -f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV -+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo -dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW -Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa -G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq -gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H -vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 -0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC -B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u -NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg -yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev -HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 -xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR -TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg -JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV -7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl -6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 159662495401136852707857743206 -# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 -# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 -# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
-AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G -jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 -4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 -VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm -ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 159662532700760215368942768210 -# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 -# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 -# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi -QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR -HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D -9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 -p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD ------END CERTIFICATE----- - -# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj -# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj -# Label: "Telia Root CA v2" -# Serial: 7288924052977061235122729490515358 -# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 -# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd -# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx -CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE -AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 -NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ -MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq -AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 -vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 -lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD -n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT -7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o -6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC -TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 -WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R -DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI -pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj -YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy -rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ -8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi -0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
-YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 -eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# 
Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
-PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j -BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
-Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G3" -# Serial: 576386314500428537169965010905813481816650257167 -# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 -# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 -# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 ------BEGIN CERTIFICATE----- -MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM -BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe -Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw -IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU -cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS -T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK -AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 -nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep -qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA -yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs -hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX -zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv -kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT -f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA -uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB -o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih -MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E -BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 -wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 -XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 -JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j -ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV -VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx -xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on -AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d -7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj -gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV -+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo -FGWsJwt0ivKH ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G4" -# Serial: 451799571007117016466790293371524403291602933463 -# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb -# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a -# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c ------BEGIN CERTIFICATE----- -MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw -WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y -MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD -VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz -dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx -s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw -LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD -pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE -AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR -UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj -/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Label: "CommScope Public Trust ECC Root-01" -# Serial: 385011430473757362783587124273108818652468453534 -# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 -# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d -# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b ------BEGIN CERTIFICATE----- -MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa -Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C -flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE -hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq -hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg -2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS -Um9poIyNStDuiw7LR47QjRE= ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Label: "CommScope Public Trust ECC Root-02" -# Serial: 234015080301808452132356021271193974922492992893 -# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 -# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 -# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a ------BEGIN CERTIFICATE----- -MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa -Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL -j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
-v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq -hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n -ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV -mkzw5l4lIhVtwodZ0LKOag== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Label: "CommScope Public Trust RSA Root-01" -# Serial: 354030733275608256394402989253558293562031411421 -# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 -# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 -# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 -NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk -YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh -suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al -DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj -WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl -P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 -KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p -UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ -kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO -Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB -Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U -CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ -KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 -NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ -nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ -QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v -trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a -aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD -j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 -Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w -lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn -YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc -icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Label: "CommScope Public Trust RSA Root-02" -# Serial: 480062499834624527752716769107743131258796508494 -# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa -# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae -# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
-NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE -NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 -kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C -rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz -hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 -LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs -n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku -FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 -kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 -wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v -wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs -5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ -KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB -KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 -+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme -APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq -pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT -6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF -sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt -PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d -lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 -v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O -rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS ECC Root 2020" -# Serial: 72082518505882327255703894282316633856 -# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd -# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec -# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 ------BEGIN CERTIFICATE----- -MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw -CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH -bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw -MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx -JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE -AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O -tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP -f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA -MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di -z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn -27iQ7t0l ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS RSA Root 2023" -# Serial: 44676229530606711399881795178081572759 -# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 -# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 -# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj -MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 -eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy -MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC -REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG -A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 -cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV -cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA -U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 -Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug -BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy -8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J -co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg -8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 -rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 -mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg -+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX -gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 -p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ -pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm -9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw -M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd -GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ -CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t -xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ -w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK -L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj -X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q -ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm -dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= ------END CERTIFICATE----- - -# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" -# Serial: 65916896770016886708751106294915943533 -# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 -# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 -# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a ------BEGIN CERTIFICATE----- -MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf -e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C -cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O -BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO -PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
-hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG -XSaQpYXFuXqUPoeovQA= ------END CERTIFICATE----- - -# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA CYBER Root CA" -# Serial: 85076849864375384482682434040119489222 -# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 -# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 -# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ -MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 -IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 -WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO -LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg -Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P -40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF -avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ -34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i -JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu -j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf -Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP -2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA -S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA -oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC -kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW -5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd -BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB -AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t -tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn -68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn -TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t -RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx -f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI -Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz -8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 -NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX -xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 -t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
-# Label: "SecureSign Root CA12" -# Serial: 587887345431707215246142177076162061960426065942 -# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 -# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 -# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw -NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF -KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt -p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd -J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur -FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J -hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K -h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF -AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld -mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ -mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA -8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV -55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ -yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA14" -# Serial: 575790784512929437950770173562378038616896959179 -# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 -# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f -# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw -NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ -FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg -vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy -6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo -/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J -kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ -0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib -y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac -18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs -0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB -SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL -ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk -86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E -rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib -ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
-zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS -DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 -2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo -FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy -K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 -dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl -Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB -365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c -JRNItX+S ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA15" -# Serial: 126083514594751269499665114766174399806381178503 -# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 -# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d -# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a ------BEGIN CERTIFICATE----- -MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw -UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM -dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy -NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl -cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 -IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 -wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR -ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT -9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp -4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 -bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 2 2023" -# Serial: 153168538924886464690566649552453098598 -# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 -# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 -# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw -OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr -i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE -gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 -k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT -Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl -2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U -cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP -/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS -uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ -0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N -DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ -XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
-GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI -FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n -riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR -VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc -LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn -4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD -hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG -koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 -ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS -Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 -knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ -hJ65bvspmZDogNOfJA== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. -# Label: "TrustAsia TLS ECC Root CA" -# Serial: 310892014698942880364840003424242768478804666567 -# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c -# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 -# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 ------BEGIN CERTIFICATE----- -MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw -WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw -NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE -ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB -c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ -AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp -guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw -DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 -L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR -OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia TLS RSA Root CA" -# Serial: 160405846464868906657516898462547310235378010780 -# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 -# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa -# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 ------BEGIN CERTIFICATE----- -MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM -BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN -MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG -A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 -c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC -AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ -NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ -Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 -HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 -ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb -xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX -i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ -UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j -TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT -bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 -S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 -Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 -iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt -7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp -2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ -g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj -pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M -pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP -XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe -SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 -ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy -323imttUQ/hHWKNddBWcwauwxzQ= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 2 2023" -# Serial: 139766439402180512324132425437959641711 -# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 -# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b -# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce ------BEGIN CERTIFICATE----- -MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI -MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE -LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw -OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi -MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK -F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE -7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe -EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 -lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb 
-RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV -jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc -jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx -TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ -ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk -hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF -NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH -kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG -OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y -XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 -QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 -pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q -3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU -t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX -cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 -ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT -2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs -7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP -gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst -Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh -XBxvWHZks/wCuPWdCg== ------END CERTIFICATE----- - -# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG -# Label: "SwissSign RSA TLS Root CA 2022 - 1" -# Serial: 388078645722908516278762308316089881486363258315 -# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 -# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce -# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 ------BEGIN CERTIFICATE----- -MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE -AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx -MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT -d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg -MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX -vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 -LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX -5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE -EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt -/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x -0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 -KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM -0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd -OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta -clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK -wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 -DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL -BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 -10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz -Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ -iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc -gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM -ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF 
-LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp -zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td -Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 -rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO -gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ ------END CERTIFICATE----- diff --git a/venv/lib/python3.12/site-packages/certifi/core.py b/venv/lib/python3.12/site-packages/certifi/core.py deleted file mode 100644 index 1c9661cc..00000000 --- a/venv/lib/python3.12/site-packages/certifi/core.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys -import atexit - -def exit_cacert_ctx() -> None: - _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -else: - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. 
- _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.12/site-packages/certifi/py.typed b/venv/lib/python3.12/site-packages/certifi/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/METADATA b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/METADATA deleted file mode 100644 index 573d88b9..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/METADATA +++ /dev/null @@ -1,731 +0,0 @@ -Metadata-Version: 2.4 -Name: charset-normalizer -Version: 3.4.2 -Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. -Author-email: "Ahmed R. TAHRI" -Maintainer-email: "Ahmed R. TAHRI" -License: MIT -Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md -Project-URL: Documentation, https://charset-normalizer.readthedocs.io/ -Project-URL: Code, https://github.com/jawah/charset_normalizer -Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues -Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing :: Linguistic -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: unicode-backport -Dynamic: license-file - -
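For context on the `certifi/core.py` module deleted above: its entire public surface is the `where()`/`contents()` pair, and the lazy-extraction dance in `where()` exists so zipimported installs still yield a real filesystem path. A minimal sketch of how downstream code typically consumes it, assuming certifi is installed; the `ssl` wiring is illustrative and not from this repo:

```python
# Sketch: consuming certifi's two-function API (certifi.where / certifi.contents).
import ssl

import certifi

# where() extracts cacert.pem once on first call (see core.py above) and
# returns a filesystem path usable as a CA bundle.
ctx = ssl.create_default_context(cafile=certifi.where())

# contents() returns the PEM bundle itself as ASCII text.
bundle = certifi.contents()
print(bundle.count("BEGIN CERTIFICATE"), "root certificates in bundle")
```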

-# Charset Detection, for Everyone 👋
-
-*The Real First Universal Charset Detector*
-
-*(badge images removed: Download Count Total, Featured Packages, In other language (unofficial port - by the community))*
-
-
-> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
-> I'm trying to resolve the issue by taking a new approach.
-> All IANA character set names for which the Python core library provides codecs are supported.
-
-

-
->>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
-

-
-This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
-
-| Feature                                          | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
-|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|
-| `Fast`                                           | ❌ | ✅ | ✅ |
-| `Universal**`                                    | ❌ | ✅ | ❌ |
-| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
-| `Reliable` **with** distinguishable standards    | ✅ | ✅ | ✅ |
-| `License`                                        | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
-| `Native Python`                                  | ✅ | ✅ | ❌ |
-| `Detect spoken language`                         | ❌ | ✅ | N/A |
-| `UnicodeDecodeError Safety`                      | ❌ | ✅ | ❌ |
-| `Whl Size (min)`                                 | 193.6 kB | 42 kB | ~200 kB |
-| `Supported Encoding`                             | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
-

-*(demo images removed: "Reading Normalized Text", "Cat Reading Text")*

-
-*\*\* : They are clearly using specific code for a specific encoding, even if it covers most of the encodings in use*
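One practical consequence of the comparison table above is the chardet-compatible entry point described later in this README. A minimal sketch, assuming charset-normalizer is installed; the sample bytes are an illustration, and the exact encoding reported is not guaranteed:

```python
# Drop-in usage sketch: charset_normalizer.detect mirrors chardet's detect(),
# returning a dict with 'encoding', 'language' and 'confidence' keys.
from charset_normalizer import detect

payload = "Déjà vu, ceci n'est pas de l'ASCII.".encode("cp1252")
result = detect(payload)

# The reported encoding may be cp1252 or a compatible single-byte code page.
print(result["encoding"], result["confidence"])
```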
-
-## ⚡ Performance
-
-This package offers better performance than its counterpart Chardet. Here are some numbers.
-
-| Package | Accuracy | Mean per file (ms) | File per sec (est) |
-|-----------------------------------------------|:--------:|:------------------:|:------------------:|
-| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |
-| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
-
-| Package | 99th percentile | 95th percentile | 50th percentile |
-|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
-| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |
-| charset-normalizer | 100 ms | 50 ms | 5 ms |
-
-_updated as of December 2024 using CPython 3.12_
-
-Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
-
-> Stats are generated from 400+ files using default parameters. For more details on the files used, see the GHA workflows.
-> And yes, these results might change at any time. The dataset can be updated to include more files.
-> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
-> Keep in mind that the stats are generous and that Chardet's accuracy vs. ours is measured using Chardet's initial
-> capability (e.g. supported encodings). Challenge them if you want.
-
-## ✨ Installation
-
-Using pip:
-
-```sh
-pip install charset-normalizer -U
-```
-
-## 🚀 Basic Usage
-
-### CLI
-This package comes with a CLI.
-
-```
-usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
-                  file [file ...]
-
-The Real First Universal Charset Detector. Discover originating encoding used
-on text file. Normalize text to unicode.
-
-positional arguments:
-  files                 File(s) to be analysed
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -v, --verbose         Display complementary information about file if any.
-                        Stdout will contain logs about the detection process.
-  -a, --with-alternative
-                        Output complementary possibilities if any. Top-level
-                        JSON WILL be a list.
-  -n, --normalize       Permit to normalize input file. If not set, program
-                        does not write anything.
-  -m, --minimal         Only output the charset detected to STDOUT. Disabling
-                        JSON output.
-  -r, --replace         Replace file when trying to normalize it instead of
-                        creating a new one.
-  -f, --force           Replace file without asking if you are sure, use this
-                        flag with caution.
-  -t THRESHOLD, --threshold THRESHOLD
-                        Define a custom maximum amount of chaos allowed in
-                        decoded content. 0. <= chaos <= 1.
-  --version             Show version information and exit.
-```
-
-```bash
-normalizer ./data/sample.1.fr.srt
-```
-
-or
-
-```bash
-python -m charset_normalizer ./data/sample.1.fr.srt
-```
-
-🎉 Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
-
-```json
-{
-    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
-    "encoding": "cp1252",
-    "encoding_aliases": [
-        "1252",
-        "windows_1252"
-    ],
-    "alternative_encodings": [
-        "cp1254",
-        "cp1256",
-        "cp1258",
-        "iso8859_14",
-        "iso8859_15",
-        "iso8859_16",
-        "iso8859_3",
-        "iso8859_9",
-        "latin_1",
-        "mbcs"
-    ],
-    "language": "French",
-    "alphabets": [
-        "Basic Latin",
-        "Latin-1 Supplement"
-    ],
-    "has_sig_or_bom": false,
-    "chaos": 0.149,
-    "coherence": 97.152,
-    "unicode_path": null,
-    "is_preferred": true
-}
-```
-
-### Python
-*Just print out normalized text*
-```python
-from charset_normalizer import from_path
-
-results = from_path('./my_subtitle.srt')
-
-print(str(results.best()))
-```
-
-*Upgrade your code without effort*
-```python
-from charset_normalizer import detect
-```
-
-The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
-
-See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
-
-## 😇 Why
-
-When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
-reliable alternative using a completely different method. Also! I never back down from a good challenge!
-
-I **don't care** about the **originating charset** encoding, because **two different tables** can
-produce **two identical rendered strings.**
-What I want is to get readable text, the best I can.
-
-In a way, **I'm brute forcing text decoding.** How cool is that? 😎
-
-Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair broken Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
-
-## 🍰 How
-
- - Discard all charset encoding tables that could not fit the binary content.
- - Measure the noise, or mess, once opened (by chunks) with a corresponding charset encoding.
- - Extract the matches with the lowest mess detected.
- - Additionally, we measure coherence / probe for a language.
-
-**Wait a minute**, what are noise/mess and coherence according to **YOU?**
-
-*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
-**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).
- I know that my interpretation of noise is probably incomplete; feel free to contribute in order to
- improve or rewrite it.
-
-*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (as best we can). I figured
-that intel is worth something here, so I use those records against decoded text to check whether I can detect intelligent design.
-
-## ⚡ Known limitations
-
- - Language detection is unreliable when text contains two or more languages sharing identical letters (e.g. HTML (English tags) + Turkish content (sharing Latin characters)).
- - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
-
-## ⚠️ About Python EOLs
-
-**If you are running:**
-
-- Python >=2.7,<3.5: Unsupported
-- Python 3.5: charset-normalizer < 2.1
-- Python 3.6: charset-normalizer < 3.1
-- Python 3.7: charset-normalizer < 4.0
-
-Upgrade your Python interpreter as soon as possible.
-
-## 👤 Contributing
-
-Contributions, issues and feature requests are very much welcome.
-Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. - -## 📝 License - -Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
-This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. - -Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) - -## 💼 For Enterprise - -Professional support for charset-normalizer is available as part of the [Tidelift -Subscription][1]. Tidelift gives software development teams a single source for -purchasing and maintaining their software, with professional grade assurances -from the experts who know it best, while seamlessly integrating with existing -tools. - -[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme - -[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297) - -# Changelog -All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02) - -### Fixed -- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591) -- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587) - -### Changed -- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8 - -## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24) - -### Changed -- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend. -- Enforce annotation delayed loading for a simpler and consistent types in the project. -- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8 - -### Added -- pre-commit configuration. -- noxfile. - -### Removed -- `build-requirements.txt` as per using `pyproject.toml` native build configuration. -- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile). -- `setup.cfg` in favor of `pyproject.toml` metadata configuration. -- Unused `utils.range_scan` function. - -### Fixed -- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572) -- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+ - -## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) - -### Added -- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. -- Support for Python 3.13 (#512) - -### Fixed -- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. -- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) -- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. 
(#381) - -## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) - -### Fixed -- Unintentional memory usage regression when using large payload that match several encoding (#376) -- Regression on some detection case showcased in the documentation (#371) - -### Added -- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) - -## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) - -### Changed -- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 -- Improved the general detection reliability based on reports from the community - -## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) - -### Added -- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` -- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) - -### Removed -- (internal) Redundant utils.is_ascii function and unused function is_private_use_only -- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant - -### Changed -- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection -- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 - -### Fixed -- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) - -## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) - -### Changed -- Typehint for function `from_path` no longer enforce `PathLike` as its first argument -- Minor improvement over the global detection reliability - -### Added -- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries -- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) -- Explicit support for Python 3.12 - -### Fixed -- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) - -## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) - -### Added -- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) - -### Removed -- Support for Python 3.6 (PR #260) - -### Changed -- Optional speedup provided by mypy/c 1.0.1 - -## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) - -### Fixed -- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) - -### Changed -- Speedup provided by mypy/c 0.990 on Python >= 3.7 - -## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Changed -- Build with static metadata using 'build' frontend -- Make the language 
detection stricter -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it -- Sphinx warnings when generating the documentation - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' - -## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) - -### Added -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Removed -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) - -### Fixed -- Sphinx warnings when generating the documentation - -## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) - -### Changed -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Removed -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) - -### Deprecated -- Function `normalize` scheduled for removal in 3.0 - -### Changed -- Removed useless call to decode in fn is_unprintable (#206) - -### Fixed -- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) - -## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) - -### Added 
-- Output the Unicode table version when running the CLI with `--version` (PR #194) - -### Changed -- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) -- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) - -### Fixed -- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) -- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) - -### Removed -- Support for Python 3.5 (PR #192) - -### Deprecated -- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) - -## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) - -### Fixed -- ASCII miss-detection on rare cases (PR #170) - -## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) - -### Added -- Explicit support for Python 3.11 (PR #164) - -### Changed -- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) - -## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) - -### Fixed -- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) - -### Changed -- Skipping the language-detection (CD) on ASCII (PR #155) - -## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) - -### Changed -- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) - -### Fixed -- Wrong logging level applied when setting kwarg `explain` to True (PR #146) - -## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) -### Changed -- Improvement over Vietnamese detection (PR #126) -- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) -- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) -- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) -- Code style as refactored by Sourcery-AI (PR #131) -- Minor adjustment on the MD around european words (PR #133) -- Remove and replace SRTs from assets / tests (PR #139) -- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) - -### Fixed -- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) -- Avoid using too insignificant chunk (PR #137) - -### Added -- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) - -## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) -### Added -- Add support for Kazakh (Cyrillic) language detection (PR #109) - -### Changed -- Further, improve inferring the language from a given single-byte code page (PR #112) -- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) -- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) -- 
Various detection improvement (MD+CD) (PR #117) - -### Removed -- Remove redundant logging entry about detected language(s) (PR #115) - -### Fixed -- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) - -## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) -### Fixed -- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) -- Fix CLI crash when using --minimal output in certain cases (PR #103) - -### Changed -- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) - -## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) -### Changed -- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81) -- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) -- The Unicode detection is slightly improved (PR #93) -- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) - -### Removed -- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) - -### Fixed -- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) -- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) -- The MANIFEST.in was not exhaustive (PR #78) - -## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) -### Fixed -- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) -- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) -- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) -- Submatch factoring could be wrong in rare edge cases (PR #72) -- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) -- Fix line endings from CRLF to LF for certain project files (PR #67) - -### Changed -- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) -- Allow fallback on specified encoding if any (PR #71) - -## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) -### Changed -- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) -- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) - -## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) -### Fixed -- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) - -### Changed -- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) - -## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) -### Fixed -- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). 
(PR #55) -- Using explain=False permanently disable the verbose output in the current runtime (PR #47) -- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) -- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) - -### Changed -- Public function normalize default args values were not aligned with from_bytes (PR #53) - -### Added -- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) - -## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) -### Changed -- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. -- Accent has been made on UTF-8 detection, should perform rather instantaneous. -- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. -- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) -- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ -- utf_7 detection has been reinstated. - -### Removed -- This package no longer require anything when used with Python 3.5 (Dropped cached_property) -- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. -- The exception hook on UnicodeDecodeError has been removed. - -### Deprecated -- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 - -### Fixed -- The CLI output used the relative path of the file(s). Should be absolute. - -## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) -### Fixed -- Logger configuration/usage no longer conflict with others (PR #44) - -## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) -### Removed -- Using standard logging instead of using the package loguru. -- Dropping nose test framework in favor of the maintained pytest. -- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. -- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. -- Stop support for UTF-7 that does not contain a SIG. -- Dropping PrettyTable, replaced with pure JSON output in CLI. - -### Fixed -- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. -- Not searching properly for the BOM when trying utf32/16 parent codec. - -### Changed -- Improving the package final size by compressing frequencies.json. -- Huge improvement over the larges payload. - -### Added -- CLI now produces JSON consumable output. -- Return ASCII if given sequences fit. Given reasonable confidence. - -## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) - -### Fixed -- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) - -## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) - -### Fixed -- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. 
(PR #39) - -## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) - -### Fixed -- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) - -## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) - -### Changed -- Amend the previous release to allow prettytable 2.0 (PR #35) - -## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) - -### Fixed -- Fix error while using the package with a python pre-release interpreter (PR #33) - -### Changed -- Dependencies refactoring, constraints revised. - -### Added -- Add python 3.9 and 3.10 to the supported interpreters - -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/RECORD b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/RECORD deleted file mode 100644 index 55632fd5..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/RECORD +++ /dev/null @@ -1,35 +0,0 @@ -../../../bin/normalizer,sha256=n0AooZbFcjKp0DkAOKLIMTVEQT9qjj1uhHF1BcyBRN4,254 -charset_normalizer-3.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -charset_normalizer-3.4.2.dist-info/METADATA,sha256=zNVWE4rQW-YZIMHKSayMypu37bLj_xayLorHl0-HAHQ,35743 -charset_normalizer-3.4.2.dist-info/RECORD,, -charset_normalizer-3.4.2.dist-info/WHEEL,sha256=escAp0nQBKBkW_DNq5usPAJtWAtnzdSwKaeRcFbdm08,151 -charset_normalizer-3.4.2.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65 -charset_normalizer-3.4.2.dist-info/licenses/LICENSE,sha256=bQ1Bv-FwrGx9wkjJpj4lTQ-0WmDVCoJX0K-SxuJJuIc,1071 -charset_normalizer-3.4.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 -charset_normalizer/__init__.py,sha256=OKRxRv2Zhnqk00tqkN0c1BtJjm165fWXLydE52IKuHc,1590 -charset_normalizer/__main__.py,sha256=yzYxMR-IhKRHYwcSlavEv8oGdwxsR89mr2X09qXGdps,109 -charset_normalizer/__pycache__/__init__.cpython-312.pyc,, -charset_normalizer/__pycache__/__main__.cpython-312.pyc,, -charset_normalizer/__pycache__/api.cpython-312.pyc,, -charset_normalizer/__pycache__/cd.cpython-312.pyc,, -charset_normalizer/__pycache__/constant.cpython-312.pyc,, -charset_normalizer/__pycache__/legacy.cpython-312.pyc,, -charset_normalizer/__pycache__/md.cpython-312.pyc,, -charset_normalizer/__pycache__/models.cpython-312.pyc,, -charset_normalizer/__pycache__/utils.cpython-312.pyc,, -charset_normalizer/__pycache__/version.cpython-312.pyc,, -charset_normalizer/api.py,sha256=qBRz8mJ_R5E713R6TOyqHEdnmyxbEDnCSHvx32ubDGg,22617 -charset_normalizer/cd.py,sha256=WKTo1HDb-H9HfCDc3Bfwq5jzS25Ziy9SE2a74SgTq88,12522 -charset_normalizer/cli/__init__.py,sha256=D8I86lFk2-py45JvqxniTirSj_sFyE6sjaY_0-G1shc,136 -charset_normalizer/cli/__main__.py,sha256=dMaXG6IJXRvqq8z2tig7Qb83-BpWTln55ooiku5_uvg,12646 -charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc,, -charset_normalizer/constant.py,sha256=7UVY4ldYhmQMHUdgQ_sgZmzcQ0xxYxpBunqSZ-XJZ8U,42713 -charset_normalizer/legacy.py,sha256=SZE_AJujOYB1y9Y3-FkFOW9Ye4H1EHTzPbZR8DEsgl8,2287 -charset_normalizer/md.cpython-312-x86_64-linux-gnu.so,sha256=u_tcYtClfGbflvoDRpoO4ph7Uao7zmkdgF2Q0Rf1GNU,16120 -charset_normalizer/md.py,sha256=-_oN3h3_X99nkFfqamD3yu45DC_wfk5odH0Tr_CQiXs,20145 -charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so,sha256=Vd-fx1sAk98CzIjJ2N72eqUZlX1W2rO6r3BG2-xJT0I,280856 -charset_normalizer/models.py,sha256=lKXhOnIPtiakbK3i__J9wpOfzx3JDTKj7Dn3Rg0VaRI,12394 -charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer/utils.py,sha256=sTejPgrdlNsKNucZfJCxJ95lMTLA0ShHLLE3n5wpT9Q,12170 -charset_normalizer/version.py,sha256=koLXlDwKAU5IlYP4qkOpFnsncn74hlphHAlBhlDKzyw,115 diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL deleted file mode 100644 index 32f281b1..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (80.1.0) -Root-Is-Purelib: false -Tag: 
cp312-cp312-manylinux_2_17_x86_64 -Tag: cp312-cp312-manylinux2014_x86_64 - diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt deleted file mode 100644 index ec920125..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -normalizer = charset_normalizer:cli.cli_detect diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE deleted file mode 100644 index 9725772c..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2025 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt deleted file mode 100644 index 66958f0a..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer-3.4.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -charset_normalizer diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.12/site-packages/charset_normalizer/__init__.py deleted file mode 100644 index 0d3a3799..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Charset-Normalizer -~~~~~~~~~~~~~~ -The Real First Universal Charset Detector. -A library that helps you read text from an unknown charset encoding. -Motivated by chardet, This package is trying to resolve the issue by taking a new approach. -All IANA character set names for which the Python core library provides codecs are supported. - -Basic usage: - >>> from charset_normalizer import from_bytes - >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) - >>> best_guess = results.best() - >>> str(best_guess) - 'Bсеки човек има право на образование. Oбразованието!' - -Others methods and usages are available - see the full documentation -at . -:copyright: (c) 2021 by Ahmed TAHRI -:license: MIT, see LICENSE for more details. 
-""" - -from __future__ import annotations - -import logging - -from .api import from_bytes, from_fp, from_path, is_binary -from .legacy import detect -from .models import CharsetMatch, CharsetMatches -from .utils import set_logging_handler -from .version import VERSION, __version__ - -__all__ = ( - "from_fp", - "from_path", - "from_bytes", - "is_binary", - "detect", - "CharsetMatch", - "CharsetMatches", - "__version__", - "VERSION", - "set_logging_handler", -) - -# Attach a NullHandler to the top level logger by default -# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library - -logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__main__.py b/venv/lib/python3.12/site-packages/charset_normalizer/__main__.py deleted file mode 100644 index e0e76f7b..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -from __future__ import annotations - -from .cli import cli_detect - -if __name__ == "__main__": - cli_detect() diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index dcf503cd..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 70b3d354..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc deleted file mode 100644 index a7e19203..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/api.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc deleted file mode 100644 index ef69bcba..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/cd.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc deleted file mode 100644 index 0f8eec48..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/constant.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc deleted file mode 100644 index 944a633e..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/legacy.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc deleted file mode 100644 index 6e1e8b0f..00000000 Binary files 
a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/md.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 34f88996..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index ebd86615..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 1706f9a2..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/api.py b/venv/lib/python3.12/site-packages/charset_normalizer/api.py deleted file mode 100644 index 2c8c0618..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/api.py +++ /dev/null @@ -1,668 +0,0 @@ -from __future__ import annotations - -import logging -from os import PathLike -from typing import BinaryIO - -from .cd import ( - coherence_ratio, - encoding_languages, - mb_encoding_languages, - merge_coherence_ratios, -) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE -from .md import mess_ratio -from .models import CharsetMatch, CharsetMatches -from .utils import ( - any_specified_encoding, - cut_sequence_chunks, - iana_name, - identify_sig_or_bom, - is_cp_similar, - is_multi_byte_encoding, - should_strip_sig_or_bom, -) - -logger = logging.getLogger("charset_normalizer") -explain_handler = logging.StreamHandler() -explain_handler.setFormatter( - logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") -) - - -def from_bytes( - sequences: bytes | bytearray, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.2, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Given a raw bytes sequence, return the best possibles charset usable to render str objects. - If there is no results, it is a strong indicator that the source is binary/not text. - By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. - And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. - - The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page - but never take it for granted. Can improve the performance. - - You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that - purpose. - - This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
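-
-    Illustrative example (mirroring the package-level docstring; the payload
-    and the exact best() outcome are assumptions that may vary by version):
-
-        >>> results = from_bytes('Bсеки човек има право на образование.'.encode('utf_8'))
-        >>> str(results.best())
-        'Bсеки човек има право на образование.'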
- By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' - toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. - Custom logging format and handler can be set manually. - """ - - if not isinstance(sequences, (bytearray, bytes)): - raise TypeError( - "Expected object of type bytes or bytearray, got: {}".format( - type(sequences) - ) - ) - - if explain: - previous_logger_level: int = logger.level - logger.addHandler(explain_handler) - logger.setLevel(TRACE) - - length: int = len(sequences) - - if length == 0: - logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level or logging.WARNING) - return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) - - if cp_isolation is not None: - logger.log( - TRACE, - "cp_isolation is set. use this flag for debugging purpose. " - "limited list of encoding allowed : %s.", - ", ".join(cp_isolation), - ) - cp_isolation = [iana_name(cp, False) for cp in cp_isolation] - else: - cp_isolation = [] - - if cp_exclusion is not None: - logger.log( - TRACE, - "cp_exclusion is set. use this flag for debugging purpose. " - "limited list of encoding excluded : %s.", - ", ".join(cp_exclusion), - ) - cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] - else: - cp_exclusion = [] - - if length <= (chunk_size * steps): - logger.log( - TRACE, - "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", - steps, - chunk_size, - length, - ) - steps = 1 - chunk_size = length - - if steps > 1 and length / steps < chunk_size: - chunk_size = int(length / steps) - - is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE - is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE - - if is_too_small_sequence: - logger.log( - TRACE, - "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( - length - ), - ) - elif is_too_large_sequence: - logger.log( - TRACE, - "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( - length - ), - ) - - prioritized_encodings: list[str] = [] - - specified_encoding: str | None = ( - any_specified_encoding(sequences) if preemptive_behaviour else None - ) - - if specified_encoding is not None: - prioritized_encodings.append(specified_encoding) - logger.log( - TRACE, - "Detected declarative mark in sequence. Priority +1 given for %s.", - specified_encoding, - ) - - tested: set[str] = set() - tested_but_hard_failure: list[str] = [] - tested_but_soft_failure: list[str] = [] - - fallback_ascii: CharsetMatch | None = None - fallback_u8: CharsetMatch | None = None - fallback_specified: CharsetMatch | None = None - - results: CharsetMatches = CharsetMatches() - - early_stop_results: CharsetMatches = CharsetMatches() - - sig_encoding, sig_payload = identify_sig_or_bom(sequences) - - if sig_encoding is not None: - prioritized_encodings.append(sig_encoding) - logger.log( - TRACE, - "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", - len(sig_payload), - sig_encoding, - ) - - prioritized_encodings.append("ascii") - - if "utf_8" not in prioritized_encodings: - prioritized_encodings.append("utf_8") - - for encoding_iana in prioritized_encodings + IANA_SUPPORTED: - if cp_isolation and encoding_iana not in cp_isolation: - continue - - if cp_exclusion and encoding_iana in cp_exclusion: - continue - - if encoding_iana in tested: - continue - - tested.add(encoding_iana) - - decoded_payload: str | None = None - bom_or_sig_available: bool = sig_encoding == encoding_iana - strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( - encoding_iana - ) - - if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", - encoding_iana, - ) - continue - if encoding_iana in {"utf_7"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", - encoding_iana, - ) - continue - - try: - is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) - except (ModuleNotFoundError, ImportError): - logger.log( - TRACE, - "Encoding %s does not provide an IncrementalDecoder", - encoding_iana, - ) - continue - - try: - if is_too_large_sequence and is_multi_byte_decoder is False: - str( - ( - sequences[: int(50e4)] - if strip_sig_or_bom is False - else sequences[len(sig_payload) : int(50e4)] - ), - encoding=encoding_iana, - ) - else: - decoded_payload = str( - ( - sequences - if strip_sig_or_bom is False - else sequences[len(sig_payload) :] - ), - encoding=encoding_iana, - ) - except (UnicodeDecodeError, LookupError) as e: - if not isinstance(e, LookupError): - logger.log( - TRACE, - "Code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - similar_soft_failure_test: bool = False - - for encoding_soft_failed in tested_but_soft_failure: - if is_cp_similar(encoding_iana, encoding_soft_failed): - similar_soft_failure_test = True - break - - if similar_soft_failure_test: - logger.log( - TRACE, - "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", - encoding_iana, - encoding_soft_failed, - ) - continue - - r_ = range( - 0 if not bom_or_sig_available else len(sig_payload), - length, - int(length / steps), - ) - - multi_byte_bonus: bool = ( - is_multi_byte_decoder - and decoded_payload is not None - and len(decoded_payload) < length - ) - - if multi_byte_bonus: - logger.log( - TRACE, - "Code page %s is a multi byte encoding table and it appear that at least one character " - "was encoded using n-bytes.", - encoding_iana, - ) - - max_chunk_gave_up: int = int(len(r_) / 4) - - max_chunk_gave_up = max(max_chunk_gave_up, 2) - early_stop_count: int = 0 - lazy_str_hard_failure = False - - md_chunks: list[str] = [] - md_ratios = [] - - try: - for chunk in cut_sequence_chunks( - sequences, - encoding_iana, - r_, - chunk_size, - bom_or_sig_available, - strip_sig_or_bom, - sig_payload, - is_multi_byte_decoder, - decoded_payload, - ): - md_chunks.append(chunk) - - md_ratios.append( - mess_ratio( - chunk, - threshold, - explain is True and 1 <= len(cp_isolation) <= 2, - ) - ) - - if md_ratios[-1] >= threshold: - early_stop_count += 1 - - if (early_stop_count >= max_chunk_gave_up) or ( - bom_or_sig_available and strip_sig_or_bom is False - ): - break - except ( - UnicodeDecodeError - ) as e: # Lazy str loading may have missed something there - logger.log( - TRACE, - "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - early_stop_count = max_chunk_gave_up - lazy_str_hard_failure = True - - # We might want to check the sequence again with the whole content - # Only if initial MD tests passes - if ( - not lazy_str_hard_failure - and is_too_large_sequence - and not is_multi_byte_decoder - ): - try: - sequences[int(50e3) :].decode(encoding_iana, errors="strict") - except UnicodeDecodeError as e: - logger.log( - TRACE, - "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 - if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: - tested_but_soft_failure.append(encoding_iana) - logger.log( - TRACE, - "%s was excluded because of initial chaos probing. Gave up %i time(s). " - "Computed mean chaos is %f %%.", - encoding_iana, - early_stop_count, - round(mean_mess_ratio * 100, ndigits=3), - ) - # Preparing those fallbacks in case we got nothing. - if ( - enable_fallback - and encoding_iana in ["ascii", "utf_8", specified_encoding] - and not lazy_str_hard_failure - ): - fallback_entry = CharsetMatch( - sequences, - encoding_iana, - threshold, - False, - [], - decoded_payload, - preemptive_declaration=specified_encoding, - ) - if encoding_iana == specified_encoding: - fallback_specified = fallback_entry - elif encoding_iana == "ascii": - fallback_ascii = fallback_entry - else: - fallback_u8 = fallback_entry - continue - - logger.log( - TRACE, - "%s passed initial chaos probing. 
Mean measured chaos is %f %%", - encoding_iana, - round(mean_mess_ratio * 100, ndigits=3), - ) - - if not is_multi_byte_decoder: - target_languages: list[str] = encoding_languages(encoding_iana) - else: - target_languages = mb_encoding_languages(encoding_iana) - - if target_languages: - logger.log( - TRACE, - "{} should target any language(s) of {}".format( - encoding_iana, str(target_languages) - ), - ) - - cd_ratios = [] - - # We shall skip the CD when its about ASCII - # Most of the time its not relevant to run "language-detection" on it. - if encoding_iana != "ascii": - for chunk in md_chunks: - chunk_languages = coherence_ratio( - chunk, - language_threshold, - ",".join(target_languages) if target_languages else None, - ) - - cd_ratios.append(chunk_languages) - - cd_ratios_merged = merge_coherence_ratios(cd_ratios) - - if cd_ratios_merged: - logger.log( - TRACE, - "We detected language {} using {}".format( - cd_ratios_merged, encoding_iana - ), - ) - - current_match = CharsetMatch( - sequences, - encoding_iana, - mean_mess_ratio, - bom_or_sig_available, - cd_ratios_merged, - ( - decoded_payload - if ( - is_too_large_sequence is False - or encoding_iana in [specified_encoding, "ascii", "utf_8"] - ) - else None - ), - preemptive_declaration=specified_encoding, - ) - - results.append(current_match) - - if ( - encoding_iana in [specified_encoding, "ascii", "utf_8"] - and mean_mess_ratio < 0.1 - ): - # If md says nothing to worry about, then... stop immediately! - if mean_mess_ratio == 0.0: - logger.debug( - "Encoding detection: %s is most likely the one.", - current_match.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([current_match]) - - early_stop_results.append(current_match) - - if ( - len(early_stop_results) - and (specified_encoding is None or specified_encoding in tested) - and "ascii" in tested - and "utf_8" in tested - ): - probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] - logger.debug( - "Encoding detection: %s is most likely the one.", - probable_result.encoding, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return CharsetMatches([probable_result]) - - if encoding_iana == sig_encoding: - logger.debug( - "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " - "the beginning of the sequence.", - encoding_iana, - ) - if explain: # Defensive: ensure exit path clean handler - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if len(results) == 0: - if fallback_u8 or fallback_ascii or fallback_specified: - logger.log( - TRACE, - "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.", - ) - - if fallback_specified: - logger.debug( - "Encoding detection: %s will be used as a fallback match", - fallback_specified.encoding, - ) - results.append(fallback_specified) - elif ( - (fallback_u8 and fallback_ascii is None) - or ( - fallback_u8 - and fallback_ascii - and fallback_u8.fingerprint != fallback_ascii.fingerprint - ) - or (fallback_u8 is not None) - ): - logger.debug("Encoding detection: utf_8 will be used as a fallback match") - results.append(fallback_u8) - elif fallback_ascii: - logger.debug("Encoding detection: ascii will be used as a fallback match") - results.append(fallback_ascii) - - if results: - logger.debug( - "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", - results.best().encoding, # type: ignore - len(results) - 1, - ) - else: - logger.debug("Encoding detection: Unable to determine any suitable charset.") - - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return results - - -def from_fp( - fp: BinaryIO, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but using a file pointer that is already ready. - Will not close the file pointer. - """ - return from_bytes( - fp.read(), - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def from_path( - path: str | bytes | PathLike, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. - Can raise IOError. - """ - with open(path, "rb") as fp: - return from_fp( - fp, - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def is_binary( - fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: list[str] | None = None, - cp_exclusion: list[str] | None = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = False, -) -> bool: - """ - Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. - Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match - are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
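The docstring above describes is_binary as a stricter wrapper over the same pipeline, with fallbacks disabled. A small sketch of the intended behaviour (payloads are hypothetical and the exact verdict depends on the heuristics):

from charset_normalizer import is_binary

print(is_binary(b"\x00\xff\x00\xff\x00"))      # undecodable noise: expected True
print(is_binary("plain ascii text".encode()))  # ordinary text: expected False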
- """ - if isinstance(fp_or_path_or_payload, (str, PathLike)): - guesses = from_path( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - elif isinstance( - fp_or_path_or_payload, - ( - bytes, - bytearray, - ), - ): - guesses = from_bytes( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - else: - guesses = from_fp( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - - return not guesses diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/cd.py b/venv/lib/python3.12/site-packages/charset_normalizer/cd.py deleted file mode 100644 index 71a3ed51..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/cd.py +++ /dev/null @@ -1,395 +0,0 @@ -from __future__ import annotations - -import importlib -from codecs import IncrementalDecoder -from collections import Counter -from functools import lru_cache -from typing import Counter as TypeCounter - -from .constant import ( - FREQUENCIES, - KO_NAMES, - LANGUAGE_SUPPORTED_COUNT, - TOO_SMALL_SEQUENCE, - ZH_NAMES, -) -from .md import is_suspiciously_successive_range -from .models import CoherenceMatches -from .utils import ( - is_accentuated, - is_latin, - is_multi_byte_encoding, - is_unicode_range_secondary, - unicode_range, -) - - -def encoding_unicode_range(iana_name: str) -> list[str]: - """ - Return associated unicode ranges in a single byte code page. - """ - if is_multi_byte_encoding(iana_name): - raise OSError("Function not supported on multi-byte code page") - - decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder - - p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: dict[str, int] = {} - character_count: int = 0 - - for i in range(0x40, 0xFF): - chunk: str = p.decode(bytes([i])) - - if chunk: - character_range: str | None = unicode_range(chunk) - - if character_range is None: - continue - - if is_unicode_range_secondary(character_range) is False: - if character_range not in seen_ranges: - seen_ranges[character_range] = 0 - seen_ranges[character_range] += 1 - character_count += 1 - - return sorted( - [ - character_range - for character_range in seen_ranges - if seen_ranges[character_range] / character_count >= 0.15 - ] - ) - - -def unicode_range_languages(primary_range: str) -> list[str]: - """ - Return inferred languages used with a unicode range. - """ - languages: list[str] = [] - - for language, characters in FREQUENCIES.items(): - for character in characters: - if unicode_range(character) == primary_range: - languages.append(language) - break - - return languages - - -@lru_cache() -def encoding_languages(iana_name: str) -> list[str]: - """ - Single-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. 
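As a quick illustration of that correspondence (results are indicative; the import targets the internal module being deleted here):

from charset_normalizer.cd import encoding_languages

print(encoding_languages("cp1251"))   # expected to include 'Russian' (Cyrillic primary range)
print(encoding_languages("latin_1"))  # expected: ['Latin Based'], since no non-Latin range is found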
- """ - unicode_ranges: list[str] = encoding_unicode_range(iana_name) - primary_range: str | None = None - - for specified_range in unicode_ranges: - if "Latin" not in specified_range: - primary_range = specified_range - break - - if primary_range is None: - return ["Latin Based"] - - return unicode_range_languages(primary_range) - - -@lru_cache() -def mb_encoding_languages(iana_name: str) -> list[str]: - """ - Multi-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - if ( - iana_name.startswith("shift_") - or iana_name.startswith("iso2022_jp") - or iana_name.startswith("euc_j") - or iana_name == "cp932" - ): - return ["Japanese"] - if iana_name.startswith("gb") or iana_name in ZH_NAMES: - return ["Chinese"] - if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: - return ["Korean"] - - return [] - - -@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> tuple[bool, bool]: - """ - Determine main aspects from a supported language if it contains accents and if is pure Latin. - """ - target_have_accents: bool = False - target_pure_latin: bool = True - - for character in FREQUENCIES[language]: - if not target_have_accents and is_accentuated(character): - target_have_accents = True - if target_pure_latin and is_latin(character) is False: - target_pure_latin = False - - return target_have_accents, target_pure_latin - - -def alphabet_languages( - characters: list[str], ignore_non_latin: bool = False -) -> list[str]: - """ - Return associated languages associated to given characters. - """ - languages: list[tuple[str, float]] = [] - - source_have_accents = any(is_accentuated(character) for character in characters) - - for language, language_characters in FREQUENCIES.items(): - target_have_accents, target_pure_latin = get_target_features(language) - - if ignore_non_latin and target_pure_latin is False: - continue - - if target_have_accents is False and source_have_accents: - continue - - character_count: int = len(language_characters) - - character_match_count: int = len( - [c for c in language_characters if c in characters] - ) - - ratio: float = character_match_count / character_count - - if ratio >= 0.2: - languages.append((language, ratio)) - - languages = sorted(languages, key=lambda x: x[1], reverse=True) - - return [compatible_language[0] for compatible_language in languages] - - -def characters_popularity_compare( - language: str, ordered_characters: list[str] -) -> float: - """ - Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. - The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). - Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
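The rank-projection check above can be exercised directly; with an input order that matches the English frequency table defined later in constant.py, the ratio should come out close to 1.0 (a sketch using the internal module):

from charset_normalizer.cd import characters_popularity_compare

# First six letters of FREQUENCIES["English"], in order: near-perfect fit expected
print(characters_popularity_compare("English", ["e", "a", "t", "i", "o", "n"]))

# An unsupported language name raises ValueError (guarded at the top of the body below)
# characters_popularity_compare("Klingon", ["e"])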
- """ - if language not in FREQUENCIES: - raise ValueError(f"{language} not available") - - character_approved_count: int = 0 - FREQUENCIES_language_set = set(FREQUENCIES[language]) - - ordered_characters_count: int = len(ordered_characters) - target_language_characters_count: int = len(FREQUENCIES[language]) - - large_alphabet: bool = target_language_characters_count > 26 - - for character, character_rank in zip( - ordered_characters, range(0, ordered_characters_count) - ): - if character not in FREQUENCIES_language_set: - continue - - character_rank_in_language: int = FREQUENCIES[language].index(character) - expected_projection_ratio: float = ( - target_language_characters_count / ordered_characters_count - ) - character_rank_projection: int = int(character_rank * expected_projection_ratio) - - if ( - large_alphabet is False - and abs(character_rank_projection - character_rank_in_language) > 4 - ): - continue - - if ( - large_alphabet is True - and abs(character_rank_projection - character_rank_in_language) - < target_language_characters_count / 3 - ): - character_approved_count += 1 - continue - - characters_before_source: list[str] = FREQUENCIES[language][ - 0:character_rank_in_language - ] - characters_after_source: list[str] = FREQUENCIES[language][ - character_rank_in_language: - ] - characters_before: list[str] = ordered_characters[0:character_rank] - characters_after: list[str] = ordered_characters[character_rank:] - - before_match_count: int = len( - set(characters_before) & set(characters_before_source) - ) - - after_match_count: int = len( - set(characters_after) & set(characters_after_source) - ) - - if len(characters_before_source) == 0 and before_match_count <= 4: - character_approved_count += 1 - continue - - if len(characters_after_source) == 0 and after_match_count <= 4: - character_approved_count += 1 - continue - - if ( - before_match_count / len(characters_before_source) >= 0.4 - or after_match_count / len(characters_after_source) >= 0.4 - ): - character_approved_count += 1 - continue - - return character_approved_count / len(ordered_characters) - - -def alpha_unicode_split(decoded_sequence: str) -> list[str]: - """ - Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. - Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; - One containing the latin letters and the other hebrew. - """ - layers: dict[str, str] = {} - - for character in decoded_sequence: - if character.isalpha() is False: - continue - - character_range: str | None = unicode_range(character) - - if character_range is None: - continue - - layer_target_range: str | None = None - - for discovered_range in layers: - if ( - is_suspiciously_successive_range(discovered_range, character_range) - is False - ): - layer_target_range = discovered_range - break - - if layer_target_range is None: - layer_target_range = character_range - - if layer_target_range not in layers: - layers[layer_target_range] = character.lower() - continue - - layers[layer_target_range] += character.lower() - - return list(layers.values()) - - -def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: - """ - This function merge results previously given by the function coherence_ratio. - The return type is the same as coherence_ratio. 
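A toy run of the merge described above: per-language ratios collected across chunks are averaged (rounded to four places), then sorted by ratio, descending.

from charset_normalizer.cd import merge_coherence_ratios

chunk_results = [
    [("English", 0.88), ("French", 0.52)],  # coherence_ratio output for chunk 1
    [("English", 0.92)],                    # coherence_ratio output for chunk 2
]
print(merge_coherence_ratios(chunk_results))
# expected: [('English', 0.9), ('French', 0.52)]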
- """ - per_language_ratios: dict[str, list[float]] = {} - for result in results: - for sub_result in result: - language, ratio = sub_result - if language not in per_language_ratios: - per_language_ratios[language] = [ratio] - continue - per_language_ratios[language].append(ratio) - - merge = [ - ( - language, - round( - sum(per_language_ratios[language]) / len(per_language_ratios[language]), - 4, - ), - ) - for language in per_language_ratios - ] - - return sorted(merge, key=lambda x: x[1], reverse=True) - - -def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: - """ - We shall NOT return "English—" in CoherenceMatches because it is an alternative - of "English". This function only keeps the best match and remove the em-dash in it. - """ - index_results: dict[str, list[float]] = dict() - - for result in results: - language, ratio = result - no_em_name: str = language.replace("—", "") - - if no_em_name not in index_results: - index_results[no_em_name] = [] - - index_results[no_em_name].append(ratio) - - if any(len(index_results[e]) > 1 for e in index_results): - filtered_results: CoherenceMatches = [] - - for language in index_results: - filtered_results.append((language, max(index_results[language]))) - - return filtered_results - - return results - - -@lru_cache(maxsize=2048) -def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None -) -> CoherenceMatches: - """ - Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. - A layer = Character extraction by alphabets/ranges. - """ - - results: list[tuple[str, float]] = [] - ignore_non_latin: bool = False - - sufficient_match_count: int = 0 - - lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] - if "Latin Based" in lg_inclusion_list: - ignore_non_latin = True - lg_inclusion_list.remove("Latin Based") - - for layer in alpha_unicode_split(decoded_sequence): - sequence_frequencies: TypeCounter[str] = Counter(layer) - most_common = sequence_frequencies.most_common() - - character_count: int = sum(o for c, o in most_common) - - if character_count <= TOO_SMALL_SEQUENCE: - continue - - popular_character_ordered: list[str] = [c for c, o in most_common] - - for language in lg_inclusion_list or alphabet_languages( - popular_character_ordered, ignore_non_latin - ): - ratio: float = characters_popularity_compare( - language, popular_character_ordered - ) - - if ratio < threshold: - continue - elif ratio >= 0.8: - sufficient_match_count += 1 - - results.append((language, round(ratio, 4))) - - if sufficient_match_count >= 3: - break - - return sorted( - filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True - ) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__init__.py b/venv/lib/python3.12/site-packages/charset_normalizer/cli/__init__.py deleted file mode 100644 index 543a5a4d..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from __future__ import annotations - -from .__main__ import cli_detect, query_yes_no - -__all__ = ( - "cli_detect", - "query_yes_no", -) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__main__.py b/venv/lib/python3.12/site-packages/charset_normalizer/cli/__main__.py deleted file mode 100644 index cb64156a..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__main__.py +++ /dev/null @@ -1,381 +0,0 @@ -from __future__ import annotations - 
-import argparse -import sys -import typing -from json import dumps -from os.path import abspath, basename, dirname, join, realpath -from platform import python_version -from unicodedata import unidata_version - -import charset_normalizer.md as md_module -from charset_normalizer import from_fp -from charset_normalizer.models import CliDetectionResult -from charset_normalizer.version import __version__ - - -def query_yes_no(question: str, default: str = "yes") -> bool: - """Ask a yes/no question via input() and return their answer. - - "question" is a string that is presented to the user. - "default" is the presumed answer if the user just hits . - It must be "yes" (the default), "no" or None (meaning - an answer is required of the user). - - The "answer" return value is True for "yes" or False for "no". - - Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input - """ - valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} - if default is None: - prompt = " [y/n] " - elif default == "yes": - prompt = " [Y/n] " - elif default == "no": - prompt = " [y/N] " - else: - raise ValueError("invalid default answer: '%s'" % default) - - while True: - sys.stdout.write(question + prompt) - choice = input().lower() - if default is not None and choice == "": - return valid[default] - elif choice in valid: - return valid[choice] - else: - sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") - - -class FileType: - """Factory for creating file object types - - Instances of FileType are typically passed as type= arguments to the - ArgumentParser add_argument() method. - - Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. 
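cli_detect below passes this factory to argparse as type=FileType("rb"); a condensed sketch of its special-case handling (the file name is hypothetical):

from charset_normalizer.cli.__main__ import FileType

binary_opener = FileType("rb")
stdin_stream = binary_opener("-")  # "-" resolves to sys.stdin.buffer because "b" is in the mode
# fp = binary_opener("input.txt")  # any other string is opened with open("input.txt", "rb")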
- - Backported from CPython 3.12 - """ - - def __init__( - self, - mode: str = "r", - bufsize: int = -1, - encoding: str | None = None, - errors: str | None = None, - ): - self._mode = mode - self._bufsize = bufsize - self._encoding = encoding - self._errors = errors - - def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg] - # the special argument "-" means sys.std{in,out} - if string == "-": - if "r" in self._mode: - return sys.stdin.buffer if "b" in self._mode else sys.stdin - elif any(c in self._mode for c in "wax"): - return sys.stdout.buffer if "b" in self._mode else sys.stdout - else: - msg = f'argument "-" with mode {self._mode}' - raise ValueError(msg) - - # all other arguments are used as file names - try: - return open(string, self._mode, self._bufsize, self._encoding, self._errors) - except OSError as e: - message = f"can't open '{string}': {e}" - raise argparse.ArgumentTypeError(message) - - def __repr__(self) -> str: - args = self._mode, self._bufsize - kwargs = [("encoding", self._encoding), ("errors", self._errors)] - args_str = ", ".join( - [repr(arg) for arg in args if arg != -1] - + [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None] - ) - return f"{type(self).__name__}({args_str})" - - -def cli_detect(argv: list[str] | None = None) -> int: - """ - CLI assistant using ARGV and ArgumentParser - :param argv: - :return: 0 if everything is fine, anything else equal trouble - """ - parser = argparse.ArgumentParser( - description="The Real First Universal Charset Detector. " - "Discover originating encoding used on text file. " - "Normalize text to unicode." - ) - - parser.add_argument( - "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed" - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - dest="verbose", - help="Display complementary information about file if any. " - "Stdout will contain logs about the detection process.", - ) - parser.add_argument( - "-a", - "--with-alternative", - action="store_true", - default=False, - dest="alternatives", - help="Output complementary possibilities if any. Top-level JSON WILL be a list.", - ) - parser.add_argument( - "-n", - "--normalize", - action="store_true", - default=False, - dest="normalize", - help="Permit to normalize input file. If not set, program does not write anything.", - ) - parser.add_argument( - "-m", - "--minimal", - action="store_true", - default=False, - dest="minimal", - help="Only output the charset detected to STDOUT. Disabling JSON output.", - ) - parser.add_argument( - "-r", - "--replace", - action="store_true", - default=False, - dest="replace", - help="Replace file when trying to normalize it instead of creating a new one.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - dest="force", - help="Replace file without asking if you are sure, use this flag with caution.", - ) - parser.add_argument( - "-i", - "--no-preemptive", - action="store_true", - default=False, - dest="no_preemptive", - help="Disable looking at a charset declaration to hint the detector.", - ) - parser.add_argument( - "-t", - "--threshold", - action="store", - default=0.2, - type=float, - dest="threshold", - help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", - ) - parser.add_argument( - "--version", - action="version", - version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( - __version__, - python_version(), - unidata_version, - "OFF" if md_module.__file__.lower().endswith(".py") else "ON", - ), - help="Show version information and exit.", - ) - - args = parser.parse_args(argv) - - if args.replace is True and args.normalize is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --replace in addition of --normalize only.", file=sys.stderr) - return 1 - - if args.force is True and args.replace is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --force in addition of --replace only.", file=sys.stderr) - return 1 - - if args.threshold < 0.0 or args.threshold > 1.0: - if args.files: - for my_file in args.files: - my_file.close() - print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) - return 1 - - x_ = [] - - for my_file in args.files: - matches = from_fp( - my_file, - threshold=args.threshold, - explain=args.verbose, - preemptive_behaviour=args.no_preemptive is False, - ) - - best_guess = matches.best() - - if best_guess is None: - print( - 'Unable to identify originating encoding for "{}". {}'.format( - my_file.name, - ( - "Maybe try increasing maximum amount of chaos." - if args.threshold < 1.0 - else "" - ), - ), - file=sys.stderr, - ) - x_.append( - CliDetectionResult( - abspath(my_file.name), - None, - [], - [], - "Unknown", - [], - False, - 1.0, - 0.0, - None, - True, - ) - ) - else: - x_.append( - CliDetectionResult( - abspath(my_file.name), - best_guess.encoding, - best_guess.encoding_aliases, - [ - cp - for cp in best_guess.could_be_from_charset - if cp != best_guess.encoding - ], - best_guess.language, - best_guess.alphabets, - best_guess.bom, - best_guess.percent_chaos, - best_guess.percent_coherence, - None, - True, - ) - ) - - if len(matches) > 1 and args.alternatives: - for el in matches: - if el != best_guess: - x_.append( - CliDetectionResult( - abspath(my_file.name), - el.encoding, - el.encoding_aliases, - [ - cp - for cp in el.could_be_from_charset - if cp != el.encoding - ], - el.language, - el.alphabets, - el.bom, - el.percent_chaos, - el.percent_coherence, - None, - False, - ) - ) - - if args.normalize is True: - if best_guess.encoding.startswith("utf") is True: - print( - '"{}" file does not need to be normalized, as it already came from unicode.'.format( - my_file.name - ), - file=sys.stderr, - ) - if my_file.closed is False: - my_file.close() - continue - - dir_path = dirname(realpath(my_file.name)) - file_name = basename(realpath(my_file.name)) - - o_: list[str] = file_name.split(".") - - if args.replace is False: - o_.insert(-1, best_guess.encoding) - if my_file.closed is False: - my_file.close() - elif ( - args.force is False - and query_yes_no( - 'Are you sure to normalize "{}" by replacing it ?'.format( - my_file.name - ), - "no", - ) - is False - ): - if my_file.closed is False: - my_file.close() - continue - - try: - x_[0].unicode_path = join(dir_path, ".".join(o_)) - - with open(x_[0].unicode_path, "wb") as fp: - fp.write(best_guess.output()) - except OSError as e: - print(str(e), file=sys.stderr) - if my_file.closed is False: - my_file.close() - return 2 - - if my_file.closed is False: - my_file.close() - - if args.minimal is False: - print( - dumps( - [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, - ensure_ascii=True, - indent=4, - ) - ) - else: - for my_file in 
args.files: - print( - ", ".join( - [ - el.encoding or "undefined" - for el in x_ - if el.path == abspath(my_file.name) - ] - ) - ) - - return 0 - - -if __name__ == "__main__": - cli_detect() diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ca47ef9d..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index fa16d2a2..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/constant.py b/venv/lib/python3.12/site-packages/charset_normalizer/constant.py deleted file mode 100644 index cc71a019..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/constant.py +++ /dev/null @@ -1,2015 +0,0 @@ -from __future__ import annotations - -from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE -from encodings.aliases import aliases -from re import IGNORECASE -from re import compile as re_compile - -# Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: dict[str, bytes | list[bytes]] = { - "utf_8": BOM_UTF8, - "utf_7": [ - b"\x2b\x2f\x76\x38", - b"\x2b\x2f\x76\x39", - b"\x2b\x2f\x76\x2b", - b"\x2b\x2f\x76\x2f", - b"\x2b\x2f\x76\x38\x2d", - ], - "gb18030": b"\x84\x31\x95\x33", - "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], - "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], -} - -TOO_SMALL_SEQUENCE: int = 32 -TOO_BIG_SEQUENCE: int = int(10e6) - -UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 - -# Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: dict[str, range] = { - "Control character": range(32), - "Basic Latin": range(32, 128), - "Latin-1 Supplement": range(128, 256), - "Latin Extended-A": range(256, 384), - "Latin Extended-B": range(384, 592), - "IPA Extensions": range(592, 688), - "Spacing Modifier Letters": range(688, 768), - "Combining Diacritical Marks": range(768, 880), - "Greek and Coptic": range(880, 1024), - "Cyrillic": range(1024, 1280), - "Cyrillic Supplement": range(1280, 1328), - "Armenian": range(1328, 1424), - "Hebrew": range(1424, 1536), - "Arabic": range(1536, 1792), - "Syriac": range(1792, 1872), - "Arabic Supplement": range(1872, 1920), - "Thaana": range(1920, 1984), - "NKo": range(1984, 2048), - "Samaritan": range(2048, 2112), - "Mandaic": range(2112, 2144), - "Syriac Supplement": range(2144, 2160), - "Arabic Extended-B": range(2160, 2208), - "Arabic Extended-A": range(2208, 2304), - "Devanagari": range(2304, 2432), - "Bengali": range(2432, 2560), - "Gurmukhi": range(2560, 2688), - "Gujarati": range(2688, 2816), - "Oriya": range(2816, 2944), - "Tamil": range(2944, 3072), - "Telugu": range(3072, 3200), - "Kannada": range(3200, 3328), - "Malayalam": range(3328, 3456), - "Sinhala": range(3456, 3584), - "Thai": range(3584, 3712), - "Lao": range(3712, 3840), - "Tibetan": range(3840, 4096), - "Myanmar": range(4096, 4256), - "Georgian": range(4256, 4352), - "Hangul Jamo": range(4352, 4608), - "Ethiopic": range(4608, 4992), - "Ethiopic Supplement": range(4992, 5024), - "Cherokee": range(5024, 5120), 
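# (The block table continues below. It backs utils.unicode_range(character), which
# returns the first block name whose range contains ord(character); for example,
# ord("é") == 233 falls in range(128, 256), so unicode_range("é") yields
# "Latin-1 Supplement".)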
- "Unified Canadian Aboriginal Syllabics": range(5120, 5760), - "Ogham": range(5760, 5792), - "Runic": range(5792, 5888), - "Tagalog": range(5888, 5920), - "Hanunoo": range(5920, 5952), - "Buhid": range(5952, 5984), - "Tagbanwa": range(5984, 6016), - "Khmer": range(6016, 6144), - "Mongolian": range(6144, 6320), - "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), - "Limbu": range(6400, 6480), - "Tai Le": range(6480, 6528), - "New Tai Lue": range(6528, 6624), - "Khmer Symbols": range(6624, 6656), - "Buginese": range(6656, 6688), - "Tai Tham": range(6688, 6832), - "Combining Diacritical Marks Extended": range(6832, 6912), - "Balinese": range(6912, 7040), - "Sundanese": range(7040, 7104), - "Batak": range(7104, 7168), - "Lepcha": range(7168, 7248), - "Ol Chiki": range(7248, 7296), - "Cyrillic Extended-C": range(7296, 7312), - "Georgian Extended": range(7312, 7360), - "Sundanese Supplement": range(7360, 7376), - "Vedic Extensions": range(7376, 7424), - "Phonetic Extensions": range(7424, 7552), - "Phonetic Extensions Supplement": range(7552, 7616), - "Combining Diacritical Marks Supplement": range(7616, 7680), - "Latin Extended Additional": range(7680, 7936), - "Greek Extended": range(7936, 8192), - "General Punctuation": range(8192, 8304), - "Superscripts and Subscripts": range(8304, 8352), - "Currency Symbols": range(8352, 8400), - "Combining Diacritical Marks for Symbols": range(8400, 8448), - "Letterlike Symbols": range(8448, 8528), - "Number Forms": range(8528, 8592), - "Arrows": range(8592, 8704), - "Mathematical Operators": range(8704, 8960), - "Miscellaneous Technical": range(8960, 9216), - "Control Pictures": range(9216, 9280), - "Optical Character Recognition": range(9280, 9312), - "Enclosed Alphanumerics": range(9312, 9472), - "Box Drawing": range(9472, 9600), - "Block Elements": range(9600, 9632), - "Geometric Shapes": range(9632, 9728), - "Miscellaneous Symbols": range(9728, 9984), - "Dingbats": range(9984, 10176), - "Miscellaneous Mathematical Symbols-A": range(10176, 10224), - "Supplemental Arrows-A": range(10224, 10240), - "Braille Patterns": range(10240, 10496), - "Supplemental Arrows-B": range(10496, 10624), - "Miscellaneous Mathematical Symbols-B": range(10624, 10752), - "Supplemental Mathematical Operators": range(10752, 11008), - "Miscellaneous Symbols and Arrows": range(11008, 11264), - "Glagolitic": range(11264, 11360), - "Latin Extended-C": range(11360, 11392), - "Coptic": range(11392, 11520), - "Georgian Supplement": range(11520, 11568), - "Tifinagh": range(11568, 11648), - "Ethiopic Extended": range(11648, 11744), - "Cyrillic Extended-A": range(11744, 11776), - "Supplemental Punctuation": range(11776, 11904), - "CJK Radicals Supplement": range(11904, 12032), - "Kangxi Radicals": range(12032, 12256), - "Ideographic Description Characters": range(12272, 12288), - "CJK Symbols and Punctuation": range(12288, 12352), - "Hiragana": range(12352, 12448), - "Katakana": range(12448, 12544), - "Bopomofo": range(12544, 12592), - "Hangul Compatibility Jamo": range(12592, 12688), - "Kanbun": range(12688, 12704), - "Bopomofo Extended": range(12704, 12736), - "CJK Strokes": range(12736, 12784), - "Katakana Phonetic Extensions": range(12784, 12800), - "Enclosed CJK Letters and Months": range(12800, 13056), - "CJK Compatibility": range(13056, 13312), - "CJK Unified Ideographs Extension A": range(13312, 19904), - "Yijing Hexagram Symbols": range(19904, 19968), - "CJK Unified Ideographs": range(19968, 40960), - "Yi Syllables": range(40960, 42128), - "Yi Radicals": 
range(42128, 42192), - "Lisu": range(42192, 42240), - "Vai": range(42240, 42560), - "Cyrillic Extended-B": range(42560, 42656), - "Bamum": range(42656, 42752), - "Modifier Tone Letters": range(42752, 42784), - "Latin Extended-D": range(42784, 43008), - "Syloti Nagri": range(43008, 43056), - "Common Indic Number Forms": range(43056, 43072), - "Phags-pa": range(43072, 43136), - "Saurashtra": range(43136, 43232), - "Devanagari Extended": range(43232, 43264), - "Kayah Li": range(43264, 43312), - "Rejang": range(43312, 43360), - "Hangul Jamo Extended-A": range(43360, 43392), - "Javanese": range(43392, 43488), - "Myanmar Extended-B": range(43488, 43520), - "Cham": range(43520, 43616), - "Myanmar Extended-A": range(43616, 43648), - "Tai Viet": range(43648, 43744), - "Meetei Mayek Extensions": range(43744, 43776), - "Ethiopic Extended-A": range(43776, 43824), - "Latin Extended-E": range(43824, 43888), - "Cherokee Supplement": range(43888, 43968), - "Meetei Mayek": range(43968, 44032), - "Hangul Syllables": range(44032, 55216), - "Hangul Jamo Extended-B": range(55216, 55296), - "High Surrogates": range(55296, 56192), - "High Private Use Surrogates": range(56192, 56320), - "Low Surrogates": range(56320, 57344), - "Private Use Area": range(57344, 63744), - "CJK Compatibility Ideographs": range(63744, 64256), - "Alphabetic Presentation Forms": range(64256, 64336), - "Arabic Presentation Forms-A": range(64336, 65024), - "Variation Selectors": range(65024, 65040), - "Vertical Forms": range(65040, 65056), - "Combining Half Marks": range(65056, 65072), - "CJK Compatibility Forms": range(65072, 65104), - "Small Form Variants": range(65104, 65136), - "Arabic Presentation Forms-B": range(65136, 65280), - "Halfwidth and Fullwidth Forms": range(65280, 65520), - "Specials": range(65520, 65536), - "Linear B Syllabary": range(65536, 65664), - "Linear B Ideograms": range(65664, 65792), - "Aegean Numbers": range(65792, 65856), - "Ancient Greek Numbers": range(65856, 65936), - "Ancient Symbols": range(65936, 66000), - "Phaistos Disc": range(66000, 66048), - "Lycian": range(66176, 66208), - "Carian": range(66208, 66272), - "Coptic Epact Numbers": range(66272, 66304), - "Old Italic": range(66304, 66352), - "Gothic": range(66352, 66384), - "Old Permic": range(66384, 66432), - "Ugaritic": range(66432, 66464), - "Old Persian": range(66464, 66528), - "Deseret": range(66560, 66640), - "Shavian": range(66640, 66688), - "Osmanya": range(66688, 66736), - "Osage": range(66736, 66816), - "Elbasan": range(66816, 66864), - "Caucasian Albanian": range(66864, 66928), - "Vithkuqi": range(66928, 67008), - "Linear A": range(67072, 67456), - "Latin Extended-F": range(67456, 67520), - "Cypriot Syllabary": range(67584, 67648), - "Imperial Aramaic": range(67648, 67680), - "Palmyrene": range(67680, 67712), - "Nabataean": range(67712, 67760), - "Hatran": range(67808, 67840), - "Phoenician": range(67840, 67872), - "Lydian": range(67872, 67904), - "Meroitic Hieroglyphs": range(67968, 68000), - "Meroitic Cursive": range(68000, 68096), - "Kharoshthi": range(68096, 68192), - "Old South Arabian": range(68192, 68224), - "Old North Arabian": range(68224, 68256), - "Manichaean": range(68288, 68352), - "Avestan": range(68352, 68416), - "Inscriptional Parthian": range(68416, 68448), - "Inscriptional Pahlavi": range(68448, 68480), - "Psalter Pahlavi": range(68480, 68528), - "Old Turkic": range(68608, 68688), - "Old Hungarian": range(68736, 68864), - "Hanifi Rohingya": range(68864, 68928), - "Rumi Numeral Symbols": range(69216, 69248), - "Yezidi": 
range(69248, 69312), - "Arabic Extended-C": range(69312, 69376), - "Old Sogdian": range(69376, 69424), - "Sogdian": range(69424, 69488), - "Old Uyghur": range(69488, 69552), - "Chorasmian": range(69552, 69600), - "Elymaic": range(69600, 69632), - "Brahmi": range(69632, 69760), - "Kaithi": range(69760, 69840), - "Sora Sompeng": range(69840, 69888), - "Chakma": range(69888, 69968), - "Mahajani": range(69968, 70016), - "Sharada": range(70016, 70112), - "Sinhala Archaic Numbers": range(70112, 70144), - "Khojki": range(70144, 70224), - "Multani": range(70272, 70320), - "Khudawadi": range(70320, 70400), - "Grantha": range(70400, 70528), - "Newa": range(70656, 70784), - "Tirhuta": range(70784, 70880), - "Siddham": range(71040, 71168), - "Modi": range(71168, 71264), - "Mongolian Supplement": range(71264, 71296), - "Takri": range(71296, 71376), - "Ahom": range(71424, 71504), - "Dogra": range(71680, 71760), - "Warang Citi": range(71840, 71936), - "Dives Akuru": range(71936, 72032), - "Nandinagari": range(72096, 72192), - "Zanabazar Square": range(72192, 72272), - "Soyombo": range(72272, 72368), - "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), - "Pau Cin Hau": range(72384, 72448), - "Devanagari Extended-A": range(72448, 72544), - "Bhaiksuki": range(72704, 72816), - "Marchen": range(72816, 72896), - "Masaram Gondi": range(72960, 73056), - "Gunjala Gondi": range(73056, 73136), - "Makasar": range(73440, 73472), - "Kawi": range(73472, 73568), - "Lisu Supplement": range(73648, 73664), - "Tamil Supplement": range(73664, 73728), - "Cuneiform": range(73728, 74752), - "Cuneiform Numbers and Punctuation": range(74752, 74880), - "Early Dynastic Cuneiform": range(74880, 75088), - "Cypro-Minoan": range(77712, 77824), - "Egyptian Hieroglyphs": range(77824, 78896), - "Egyptian Hieroglyph Format Controls": range(78896, 78944), - "Anatolian Hieroglyphs": range(82944, 83584), - "Bamum Supplement": range(92160, 92736), - "Mro": range(92736, 92784), - "Tangsa": range(92784, 92880), - "Bassa Vah": range(92880, 92928), - "Pahawh Hmong": range(92928, 93072), - "Medefaidrin": range(93760, 93856), - "Miao": range(93952, 94112), - "Ideographic Symbols and Punctuation": range(94176, 94208), - "Tangut": range(94208, 100352), - "Tangut Components": range(100352, 101120), - "Khitan Small Script": range(101120, 101632), - "Tangut Supplement": range(101632, 101760), - "Kana Extended-B": range(110576, 110592), - "Kana Supplement": range(110592, 110848), - "Kana Extended-A": range(110848, 110896), - "Small Kana Extension": range(110896, 110960), - "Nushu": range(110960, 111360), - "Duployan": range(113664, 113824), - "Shorthand Format Controls": range(113824, 113840), - "Znamenny Musical Notation": range(118528, 118736), - "Byzantine Musical Symbols": range(118784, 119040), - "Musical Symbols": range(119040, 119296), - "Ancient Greek Musical Notation": range(119296, 119376), - "Kaktovik Numerals": range(119488, 119520), - "Mayan Numerals": range(119520, 119552), - "Tai Xuan Jing Symbols": range(119552, 119648), - "Counting Rod Numerals": range(119648, 119680), - "Mathematical Alphanumeric Symbols": range(119808, 120832), - "Sutton SignWriting": range(120832, 121520), - "Latin Extended-G": range(122624, 122880), - "Glagolitic Supplement": range(122880, 122928), - "Cyrillic Extended-D": range(122928, 123024), - "Nyiakeng Puachue Hmong": range(123136, 123216), - "Toto": range(123536, 123584), - "Wancho": range(123584, 123648), - "Nag Mundari": range(124112, 124160), - "Ethiopic Extended-B": range(124896, 
124928), - "Mende Kikakui": range(124928, 125152), - "Adlam": range(125184, 125280), - "Indic Siyaq Numbers": range(126064, 126144), - "Ottoman Siyaq Numbers": range(126208, 126288), - "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), - "Mahjong Tiles": range(126976, 127024), - "Domino Tiles": range(127024, 127136), - "Playing Cards": range(127136, 127232), - "Enclosed Alphanumeric Supplement": range(127232, 127488), - "Enclosed Ideographic Supplement": range(127488, 127744), - "Miscellaneous Symbols and Pictographs": range(127744, 128512), - "Emoticons range(Emoji)": range(128512, 128592), - "Ornamental Dingbats": range(128592, 128640), - "Transport and Map Symbols": range(128640, 128768), - "Alchemical Symbols": range(128768, 128896), - "Geometric Shapes Extended": range(128896, 129024), - "Supplemental Arrows-C": range(129024, 129280), - "Supplemental Symbols and Pictographs": range(129280, 129536), - "Chess Symbols": range(129536, 129648), - "Symbols and Pictographs Extended-A": range(129648, 129792), - "Symbols for Legacy Computing": range(129792, 130048), - "CJK Unified Ideographs Extension B": range(131072, 173792), - "CJK Unified Ideographs Extension C": range(173824, 177984), - "CJK Unified Ideographs Extension D": range(177984, 178208), - "CJK Unified Ideographs Extension E": range(178208, 183984), - "CJK Unified Ideographs Extension F": range(183984, 191472), - "CJK Compatibility Ideographs Supplement": range(194560, 195104), - "CJK Unified Ideographs Extension G": range(196608, 201552), - "CJK Unified Ideographs Extension H": range(201552, 205744), - "Tags": range(917504, 917632), - "Variation Selectors Supplement": range(917760, 918000), - "Supplementary Private Use Area-A": range(983040, 1048576), - "Supplementary Private Use Area-B": range(1048576, 1114112), -} - - -UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ - "Supplement", - "Extended", - "Extensions", - "Modifier", - "Marks", - "Punctuation", - "Symbols", - "Forms", - "Operators", - "Miscellaneous", - "Drawing", - "Block", - "Shapes", - "Supplemental", - "Tags", -] - -RE_POSSIBLE_ENCODING_INDICATION = re_compile( - r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", - IGNORECASE, -) - -IANA_NO_ALIASES = [ - "cp720", - "cp737", - "cp856", - "cp874", - "cp875", - "cp1006", - "koi8_r", - "koi8_t", - "koi8_u", -] - -IANA_SUPPORTED: list[str] = sorted( - filter( - lambda x: x.endswith("_codec") is False - and x not in {"rot_13", "tactis", "mbcs"}, - list(set(aliases.values())) + IANA_NO_ALIASES, - ) -) - -IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) - -# pre-computed code page that are similar using the function cp_similarity. 
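This pre-computed table is what is_cp_similar, imported in api.py above, consults when deciding to skip a code page that is nearly identical to one that already soft-failed; its lookup is roughly a membership test:

def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
    # Sketch of utils.is_cp_similar: checks the pre-computed pairs below
    return (
        iana_name_a in IANA_SUPPORTED_SIMILAR
        and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
    )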
-IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { - "cp037": ["cp1026", "cp1140", "cp273", "cp500"], - "cp1026": ["cp037", "cp1140", "cp273", "cp500"], - "cp1125": ["cp866"], - "cp1140": ["cp037", "cp1026", "cp273", "cp500"], - "cp1250": ["iso8859_2"], - "cp1251": ["kz1048", "ptcp154"], - "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1253": ["iso8859_7"], - "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1257": ["iso8859_13"], - "cp273": ["cp037", "cp1026", "cp1140", "cp500"], - "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], - "cp500": ["cp037", "cp1026", "cp1140", "cp273"], - "cp850": ["cp437", "cp857", "cp858", "cp865"], - "cp857": ["cp850", "cp858", "cp865"], - "cp858": ["cp437", "cp850", "cp857", "cp865"], - "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], - "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], - "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], - "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], - "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], - "cp866": ["cp1125"], - "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], - "iso8859_11": ["tis_620"], - "iso8859_13": ["cp1257"], - "iso8859_14": [ - "iso8859_10", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_15": [ - "cp1252", - "cp1254", - "iso8859_10", - "iso8859_14", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_16": [ - "iso8859_14", - "iso8859_15", - "iso8859_2", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], - "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], - "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], - "iso8859_7": ["cp1253"], - "iso8859_9": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "latin_1", - ], - "kz1048": ["cp1251", "ptcp154"], - "latin_1": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "iso8859_9", - ], - "mac_iceland": ["mac_roman", "mac_turkish"], - "mac_roman": ["mac_iceland", "mac_turkish"], - "mac_turkish": ["mac_iceland", "mac_roman"], - "ptcp154": ["cp1251", "kz1048"], - "tis_620": ["iso8859_11"], -} - - -CHARDET_CORRESPONDENCE: dict[str, str] = { - "iso2022_kr": "ISO-2022-KR", - "iso2022_jp": "ISO-2022-JP", - "euc_kr": "EUC-KR", - "tis_620": "TIS-620", - "utf_32": "UTF-32", - "euc_jp": "EUC-JP", - "koi8_r": "KOI8-R", - "iso8859_1": "ISO-8859-1", - "iso8859_2": "ISO-8859-2", - "iso8859_5": "ISO-8859-5", - "iso8859_6": "ISO-8859-6", - "iso8859_7": "ISO-8859-7", - "iso8859_8": "ISO-8859-8", - "utf_16": "UTF-16", - "cp855": "IBM855", - "mac_cyrillic": "MacCyrillic", - "gb2312": "GB2312", - "gb18030": "GB18030", - "cp932": "CP932", - "cp866": "IBM866", - "utf_8": "utf-8", - "utf_8_sig": "UTF-8-SIG", - "shift_jis": "SHIFT_JIS", - "big5": "Big5", - "cp1250": "windows-1250", - "cp1251": "windows-1251", - "cp1252": "Windows-1252", - "cp1253": "windows-1253", - "cp1255": "windows-1255", - "cp1256": "windows-1256", - "cp1254": "Windows-1254", - "cp949": "CP949", -} - - -COMMON_SAFE_ASCII_CHARACTERS: set[str] = { - "<", - ">", - "=", - ":", - "/", - "&", - ";", - "{", - "}", - "[", - "]", - ",", - "|", - '"', - "-", - "(", - ")", -} - -# Sample character sets — replace with full lists if needed -COMMON_CHINESE_CHARACTERS = 
"的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" - -COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" - -COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" - -# Combine all into a set -COMMON_CJK_CHARACTERS = set( - "".join( - [ - COMMON_CHINESE_CHARACTERS, - COMMON_JAPANESE_CHARACTERS, - COMMON_KOREAN_CHARACTERS, - ] - ) -) - -KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} - -# Logging LEVEL below DEBUG -TRACE: int = 5 - - -# Language label that contain the em dash "—" -# character are to be considered alternative seq to origin -FREQUENCIES: dict[str, list[str]] = { - "English": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "u", - "m", - "f", - "p", - "g", - "w", - "y", - "b", - "v", - "k", - "x", - "j", - "z", - "q", - ], - "English—": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "m", - "u", - "f", - "p", - "g", - "w", - "b", - "y", - "v", - "k", - "j", - "x", - "z", - "q", - ], - "German": [ - "e", - "n", - "i", - "r", - "s", - "t", - "a", - "d", - "h", - "u", - "l", - "g", - "o", - "c", - "m", - "b", - "f", - "k", - "w", - "z", - "p", - "v", - "ü", - "ä", - "ö", - "j", - ], - "French": [ - "e", - "a", - "s", - "n", - "i", - "t", - "r", - "l", - "u", - "o", - "d", - "c", - "p", - "m", - "é", - "v", - "g", - "f", - "b", - "h", - "q", - "à", - "x", - "è", - "y", - "j", - ], - "Dutch": [ - "e", - "n", - "a", - "i", - "r", - "t", - "o", - "d", - "s", - "l", - "g", - "h", - "v", - "m", - "u", - "k", - "c", - "p", - "b", - "w", - "j", - "z", - "f", - "y", - "x", - "ë", - ], - "Italian": [ - "e", - "i", - "a", - "o", - "n", - "l", - "t", - "r", - "s", - "c", - "d", - "u", - "p", - "m", - "g", - "v", - "f", - "b", - "z", - "h", - "q", - "è", - "à", - "k", - "y", - "ò", - ], - "Polish": [ - "a", - "i", - "o", - "e", - "n", - "r", - "z", - "w", - "s", - "c", - "t", - "k", - "y", - "d", - "p", - "m", - "u", - "l", - "j", - "ł", - "g", - "b", - "h", - "ą", - "ę", - "ó", - ], - "Spanish": [ - "e", - "a", - "o", - "n", - "s", - "r", - "i", - "l", - "d", - "t", - "c", - "u", - "m", - "p", - "b", - "g", - "v", - "f", - "y", - "ó", - "h", - "q", - "í", - "j", - "z", - "á", - ], - "Russian": [ - "о", - "а", - "е", - "и", - "н", - "с", - "т", - "р", - "в", - "л", - "к", - "м", - "д", - "п", - "у", - "г", - "я", - "ы", - "з", - "б", - "й", - "ь", - "ч", - "х", - "ж", - "ц", - ], - # Jap-Kanji - "Japanese": [ - "人", - "一", - "大", - "亅", - "丁", - "丨", - "竹", - "笑", - "口", - "日", - "今", - "二", - "彳", - "行", - "十", - "土", - "丶", - "寸", - "寺", - "時", - "乙", - "丿", - "乂", - "气", - "気", - "冂", - "巾", - "亠", - "市", - "目", - "儿", - "見", - "八", - "小", - "凵", - "県", - "月", - "彐", - "門", - "間", - "木", - "東", - "山", - "出", - "本", - "中", - "刀", - "分", - "耳", - "又", - "取", - "最", - "言", - "田", - "心", - "思", - "刂", - "前", - "京", - "尹", - "事", - "生", - "厶", - "云", - "会", - "未", - "来", - "白", - "冫", - "楽", - "灬", - "馬", - "尸", - "尺", - "駅", - "明", - 
"耂", - "者", - "了", - "阝", - "都", - "高", - "卜", - "占", - "厂", - "广", - "店", - "子", - "申", - "奄", - "亻", - "俺", - "上", - "方", - "冖", - "学", - "衣", - "艮", - "食", - "自", - ], - # Jap-Katakana - "Japanese—": [ - "ー", - "ン", - "ス", - "・", - "ル", - "ト", - "リ", - "イ", - "ア", - "ラ", - "ッ", - "ク", - "ド", - "シ", - "レ", - "ジ", - "タ", - "フ", - "ロ", - "カ", - "テ", - "マ", - "ィ", - "グ", - "バ", - "ム", - "プ", - "オ", - "コ", - "デ", - "ニ", - "ウ", - "メ", - "サ", - "ビ", - "ナ", - "ブ", - "ャ", - "エ", - "ュ", - "チ", - "キ", - "ズ", - "ダ", - "パ", - "ミ", - "ェ", - "ョ", - "ハ", - "セ", - "ベ", - "ガ", - "モ", - "ツ", - "ネ", - "ボ", - "ソ", - "ノ", - "ァ", - "ヴ", - "ワ", - "ポ", - "ペ", - "ピ", - "ケ", - "ゴ", - "ギ", - "ザ", - "ホ", - "ゲ", - "ォ", - "ヤ", - "ヒ", - "ユ", - "ヨ", - "ヘ", - "ゼ", - "ヌ", - "ゥ", - "ゾ", - "ヶ", - "ヂ", - "ヲ", - "ヅ", - "ヵ", - "ヱ", - "ヰ", - "ヮ", - "ヽ", - "゠", - "ヾ", - "ヷ", - "ヿ", - "ヸ", - "ヹ", - "ヺ", - ], - # Jap-Hiragana - "Japanese——": [ - "の", - "に", - "る", - "た", - "と", - "は", - "し", - "い", - "を", - "で", - "て", - "が", - "な", - "れ", - "か", - "ら", - "さ", - "っ", - "り", - "す", - "あ", - "も", - "こ", - "ま", - "う", - "く", - "よ", - "き", - "ん", - "め", - "お", - "け", - "そ", - "つ", - "だ", - "や", - "え", - "ど", - "わ", - "ち", - "み", - "せ", - "じ", - "ば", - "へ", - "び", - "ず", - "ろ", - "ほ", - "げ", - "む", - "べ", - "ひ", - "ょ", - "ゆ", - "ぶ", - "ご", - "ゃ", - "ね", - "ふ", - "ぐ", - "ぎ", - "ぼ", - "ゅ", - "づ", - "ざ", - "ぞ", - "ぬ", - "ぜ", - "ぱ", - "ぽ", - "ぷ", - "ぴ", - "ぃ", - "ぁ", - "ぇ", - "ぺ", - "ゞ", - "ぢ", - "ぉ", - "ぅ", - "ゐ", - "ゝ", - "ゑ", - "゛", - "゜", - "ゎ", - "ゔ", - "゚", - "ゟ", - "゙", - "ゕ", - "ゖ", - ], - "Portuguese": [ - "a", - "e", - "o", - "s", - "i", - "r", - "d", - "n", - "t", - "m", - "u", - "c", - "l", - "p", - "g", - "v", - "b", - "f", - "h", - "ã", - "q", - "é", - "ç", - "á", - "z", - "í", - ], - "Swedish": [ - "e", - "a", - "n", - "r", - "t", - "s", - "i", - "l", - "d", - "o", - "m", - "k", - "g", - "v", - "h", - "f", - "u", - "p", - "ä", - "c", - "b", - "ö", - "å", - "y", - "j", - "x", - ], - "Chinese": [ - "的", - "一", - "是", - "不", - "了", - "在", - "人", - "有", - "我", - "他", - "这", - "个", - "们", - "中", - "来", - "上", - "大", - "为", - "和", - "国", - "地", - "到", - "以", - "说", - "时", - "要", - "就", - "出", - "会", - "可", - "也", - "你", - "对", - "生", - "能", - "而", - "子", - "那", - "得", - "于", - "着", - "下", - "自", - "之", - "年", - "过", - "发", - "后", - "作", - "里", - "用", - "道", - "行", - "所", - "然", - "家", - "种", - "事", - "成", - "方", - "多", - "经", - "么", - "去", - "法", - "学", - "如", - "都", - "同", - "现", - "当", - "没", - "动", - "面", - "起", - "看", - "定", - "天", - "分", - "还", - "进", - "好", - "小", - "部", - "其", - "些", - "主", - "样", - "理", - "心", - "她", - "本", - "前", - "开", - "但", - "因", - "只", - "从", - "想", - "实", - ], - "Ukrainian": [ - "о", - "а", - "н", - "і", - "и", - "р", - "в", - "т", - "е", - "с", - "к", - "л", - "у", - "д", - "м", - "п", - "з", - "я", - "ь", - "б", - "г", - "й", - "ч", - "х", - "ц", - "ї", - ], - "Norwegian": [ - "e", - "r", - "n", - "t", - "a", - "s", - "i", - "o", - "l", - "d", - "g", - "k", - "m", - "v", - "f", - "p", - "u", - "b", - "h", - "å", - "y", - "j", - "ø", - "c", - "æ", - "w", - ], - "Finnish": [ - "a", - "i", - "n", - "t", - "e", - "s", - "l", - "o", - "u", - "k", - "ä", - "m", - "r", - "v", - "j", - "h", - "p", - "y", - "d", - "ö", - "g", - "c", - "b", - "f", - "w", - "z", - ], - "Vietnamese": [ - "n", - "h", - "t", - "i", - "c", - "g", - "a", - "o", - "u", - "m", - "l", - "r", - "à", - "đ", - "s", - "e", - "v", - "p", - "b", - "y", - "ư", - "d", - "á", - "k", - "ộ", - "ế", - ], - "Czech": [ - "o", - "e", - "a", - 
"n", - "t", - "s", - "i", - "l", - "v", - "r", - "k", - "d", - "u", - "m", - "p", - "í", - "c", - "h", - "z", - "á", - "y", - "j", - "b", - "ě", - "é", - "ř", - ], - "Hungarian": [ - "e", - "a", - "t", - "l", - "s", - "n", - "k", - "r", - "i", - "o", - "z", - "á", - "é", - "g", - "m", - "b", - "y", - "v", - "d", - "h", - "u", - "p", - "j", - "ö", - "f", - "c", - ], - "Korean": [ - "이", - "다", - "에", - "의", - "는", - "로", - "하", - "을", - "가", - "고", - "지", - "서", - "한", - "은", - "기", - "으", - "년", - "대", - "사", - "시", - "를", - "리", - "도", - "인", - "스", - "일", - ], - "Indonesian": [ - "a", - "n", - "e", - "i", - "r", - "t", - "u", - "s", - "d", - "k", - "m", - "l", - "g", - "p", - "b", - "o", - "h", - "y", - "j", - "c", - "w", - "f", - "v", - "z", - "x", - "q", - ], - "Turkish": [ - "a", - "e", - "i", - "n", - "r", - "l", - "ı", - "k", - "d", - "t", - "s", - "m", - "y", - "u", - "o", - "b", - "ü", - "ş", - "v", - "g", - "z", - "h", - "c", - "p", - "ç", - "ğ", - ], - "Romanian": [ - "e", - "i", - "a", - "r", - "n", - "t", - "u", - "l", - "o", - "c", - "s", - "d", - "p", - "m", - "ă", - "f", - "v", - "î", - "g", - "b", - "ș", - "ț", - "z", - "h", - "â", - "j", - ], - "Farsi": [ - "ا", - "ی", - "ر", - "د", - "ن", - "ه", - "و", - "م", - "ت", - "ب", - "س", - "ل", - "ک", - "ش", - "ز", - "ف", - "گ", - "ع", - "خ", - "ق", - "ج", - "آ", - "پ", - "ح", - "ط", - "ص", - ], - "Arabic": [ - "ا", - "ل", - "ي", - "م", - "و", - "ن", - "ر", - "ت", - "ب", - "ة", - "ع", - "د", - "س", - "ف", - "ه", - "ك", - "ق", - "أ", - "ح", - "ج", - "ش", - "ط", - "ص", - "ى", - "خ", - "إ", - ], - "Danish": [ - "e", - "r", - "n", - "t", - "a", - "i", - "s", - "d", - "l", - "o", - "g", - "m", - "k", - "f", - "v", - "u", - "b", - "h", - "p", - "å", - "y", - "ø", - "æ", - "c", - "j", - "w", - ], - "Serbian": [ - "а", - "и", - "о", - "е", - "н", - "р", - "с", - "у", - "т", - "к", - "ј", - "в", - "д", - "м", - "п", - "л", - "г", - "з", - "б", - "a", - "i", - "e", - "o", - "n", - "ц", - "ш", - ], - "Lithuanian": [ - "i", - "a", - "s", - "o", - "r", - "e", - "t", - "n", - "u", - "k", - "m", - "l", - "p", - "v", - "d", - "j", - "g", - "ė", - "b", - "y", - "ų", - "š", - "ž", - "c", - "ą", - "į", - ], - "Slovene": [ - "e", - "a", - "i", - "o", - "n", - "r", - "s", - "l", - "t", - "j", - "v", - "k", - "d", - "p", - "m", - "u", - "z", - "b", - "g", - "h", - "č", - "c", - "š", - "ž", - "f", - "y", - ], - "Slovak": [ - "o", - "a", - "e", - "n", - "i", - "r", - "v", - "t", - "s", - "l", - "k", - "d", - "m", - "p", - "u", - "c", - "h", - "j", - "b", - "z", - "á", - "y", - "ý", - "í", - "č", - "é", - ], - "Hebrew": [ - "י", - "ו", - "ה", - "ל", - "ר", - "ב", - "ת", - "מ", - "א", - "ש", - "נ", - "ע", - "ם", - "ד", - "ק", - "ח", - "פ", - "ס", - "כ", - "ג", - "ט", - "צ", - "ן", - "ז", - "ך", - ], - "Bulgarian": [ - "а", - "и", - "о", - "е", - "н", - "т", - "р", - "с", - "в", - "л", - "к", - "д", - "п", - "м", - "з", - "г", - "я", - "ъ", - "у", - "б", - "ч", - "ц", - "й", - "ж", - "щ", - "х", - ], - "Croatian": [ - "a", - "i", - "o", - "e", - "n", - "r", - "j", - "s", - "t", - "u", - "k", - "l", - "v", - "d", - "m", - "p", - "g", - "z", - "b", - "c", - "č", - "h", - "š", - "ž", - "ć", - "f", - ], - "Hindi": [ - "क", - "र", - "स", - "न", - "त", - "म", - "ह", - "प", - "य", - "ल", - "व", - "ज", - "द", - "ग", - "ब", - "श", - "ट", - "अ", - "ए", - "थ", - "भ", - "ड", - "च", - "ध", - "ष", - "इ", - ], - "Estonian": [ - "a", - "i", - "e", - "s", - "t", - "l", - "u", - "n", - "o", - "k", - "r", - "d", - "m", - "v", - "g", - "p", - "j", - "h", - "ä", - "b", - "õ", 
- "ü", - "f", - "c", - "ö", - "y", - ], - "Thai": [ - "า", - "น", - "ร", - "อ", - "ก", - "เ", - "ง", - "ม", - "ย", - "ล", - "ว", - "ด", - "ท", - "ส", - "ต", - "ะ", - "ป", - "บ", - "ค", - "ห", - "แ", - "จ", - "พ", - "ช", - "ข", - "ใ", - ], - "Greek": [ - "α", - "τ", - "ο", - "ι", - "ε", - "ν", - "ρ", - "σ", - "κ", - "η", - "π", - "ς", - "υ", - "μ", - "λ", - "ί", - "ό", - "ά", - "γ", - "έ", - "δ", - "ή", - "ω", - "χ", - "θ", - "ύ", - ], - "Tamil": [ - "க", - "த", - "ப", - "ட", - "ர", - "ம", - "ல", - "ன", - "வ", - "ற", - "ய", - "ள", - "ச", - "ந", - "இ", - "ண", - "அ", - "ஆ", - "ழ", - "ங", - "எ", - "உ", - "ஒ", - "ஸ", - ], - "Kazakh": [ - "а", - "ы", - "е", - "н", - "т", - "р", - "л", - "і", - "д", - "с", - "м", - "қ", - "к", - "о", - "б", - "и", - "у", - "ғ", - "ж", - "ң", - "з", - "ш", - "й", - "п", - "г", - "ө", - ], -} - -LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/legacy.py b/venv/lib/python3.12/site-packages/charset_normalizer/legacy.py deleted file mode 100644 index e221beca..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/legacy.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any -from warnings import warn - -from .api import from_bytes -from .constant import CHARDET_CORRESPONDENCE - -# TODO: remove this check when dropping Python 3.7 support -if TYPE_CHECKING: - from typing_extensions import TypedDict - - class ResultDict(TypedDict): - encoding: str | None - language: str - confidence: float | None - - -def detect( - byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any -) -> ResultDict: - """ - chardet legacy method - Detect the encoding of the given byte string. It should be mostly backward-compatible. - Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) - This function is deprecated and should be used to migrate your project easily, consult the documentation for - further information. Not planned for removal. - - :param byte_str: The byte sequence to examine. - :param should_rename_legacy: Should we rename legacy encodings - to their more modern equivalents? - """ - if len(kwargs): - warn( - f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" - ) - - if not isinstance(byte_str, (bytearray, bytes)): - raise TypeError( # pragma: nocover - f"Expected object of type bytes or bytearray, got: {type(byte_str)}" - ) - - if isinstance(byte_str, bytearray): - byte_str = bytes(byte_str) - - r = from_bytes(byte_str).best() - - encoding = r.encoding if r is not None else None - language = r.language if r is not None and r.language != "Unknown" else "" - confidence = 1.0 - r.chaos if r is not None else None - - # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process - # but chardet does return 'utf-8-sig' and it is a valid codec name. 
- if r is not None and encoding == "utf_8" and r.bom: - encoding += "_sig" - - if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: - encoding = CHARDET_CORRESPONDENCE[encoding] - - return { - "encoding": encoding, - "language": language, - "confidence": confidence, - } diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/md.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/charset_normalizer/md.cpython-312-x86_64-linux-gnu.so deleted file mode 100755 index eb884afb..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/md.cpython-312-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/md.py b/venv/lib/python3.12/site-packages/charset_normalizer/md.py deleted file mode 100644 index 12ce024b..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/md.py +++ /dev/null @@ -1,635 +0,0 @@ -from __future__ import annotations - -from functools import lru_cache -from logging import getLogger - -from .constant import ( - COMMON_SAFE_ASCII_CHARACTERS, - TRACE, - UNICODE_SECONDARY_RANGE_KEYWORD, -) -from .utils import ( - is_accentuated, - is_arabic, - is_arabic_isolated_form, - is_case_variable, - is_cjk, - is_emoticon, - is_hangul, - is_hiragana, - is_katakana, - is_latin, - is_punctuation, - is_separator, - is_symbol, - is_thai, - is_unprintable, - remove_accent, - unicode_range, - is_cjk_uncommon, -) - - -class MessDetectorPlugin: - """ - Base abstract class used for mess detection plugins. - All detectors MUST extend and implement given methods. - """ - - def eligible(self, character: str) -> bool: - """ - Determine if given character should be fed in. - """ - raise NotImplementedError # pragma: nocover - - def feed(self, character: str) -> None: - """ - The main routine to be executed upon character. - Insert the logic in witch the text would be considered chaotic. - """ - raise NotImplementedError # pragma: nocover - - def reset(self) -> None: # pragma: no cover - """ - Permit to reset the plugin to the initial state. - """ - raise NotImplementedError - - @property - def ratio(self) -> float: - """ - Compute the chaos ratio based on what your feed() has seen. - Must NOT be lower than 0.; No restriction gt 0. 
- """ - raise NotImplementedError # pragma: nocover - - -class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._punctuation_count: int = 0 - self._symbol_count: int = 0 - self._character_count: int = 0 - - self._last_printable_char: str | None = None - self._frenzy_symbol_in_word: bool = False - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character != self._last_printable_char - and character not in COMMON_SAFE_ASCII_CHARACTERS - ): - if is_punctuation(character): - self._punctuation_count += 1 - elif ( - character.isdigit() is False - and is_symbol(character) - and is_emoticon(character) is False - ): - self._symbol_count += 2 - - self._last_printable_char = character - - def reset(self) -> None: # Abstract - self._punctuation_count = 0 - self._character_count = 0 - self._symbol_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - ratio_of_punctuation: float = ( - self._punctuation_count + self._symbol_count - ) / self._character_count - - return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 - - -class TooManyAccentuatedPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._accentuated_count: int = 0 - - def eligible(self, character: str) -> bool: - return character.isalpha() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_accentuated(character): - self._accentuated_count += 1 - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._accentuated_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - ratio_of_accentuation: float = self._accentuated_count / self._character_count - return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 - - -class UnprintablePlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._unprintable_count: int = 0 - self._character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if is_unprintable(character): - self._unprintable_count += 1 - self._character_count += 1 - - def reset(self) -> None: # Abstract - self._unprintable_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._unprintable_count * 8) / self._character_count - - -class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._successive_count: int = 0 - self._character_count: int = 0 - - self._last_latin_character: str | None = None - - def eligible(self, character: str) -> bool: - return character.isalpha() and is_latin(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - if ( - self._last_latin_character is not None - and is_accentuated(character) - and is_accentuated(self._last_latin_character) - ): - if character.isupper() and self._last_latin_character.isupper(): - self._successive_count += 1 - # Worse if its the same char duplicated with different accent. 
- if remove_accent(character) == remove_accent(self._last_latin_character): - self._successive_count += 1 - self._last_latin_character = character - - def reset(self) -> None: # Abstract - self._successive_count = 0 - self._character_count = 0 - self._last_latin_character = None - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._successive_count * 2) / self._character_count - - -class SuspiciousRange(MessDetectorPlugin): - def __init__(self) -> None: - self._suspicious_successive_range_count: int = 0 - self._character_count: int = 0 - self._last_printable_seen: str | None = None - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character.isspace() - or is_punctuation(character) - or character in COMMON_SAFE_ASCII_CHARACTERS - ): - self._last_printable_seen = None - return - - if self._last_printable_seen is None: - self._last_printable_seen = character - return - - unicode_range_a: str | None = unicode_range(self._last_printable_seen) - unicode_range_b: str | None = unicode_range(character) - - if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): - self._suspicious_successive_range_count += 1 - - self._last_printable_seen = character - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._suspicious_successive_range_count = 0 - self._last_printable_seen = None - - @property - def ratio(self) -> float: - if self._character_count <= 13: - return 0.0 - - ratio_of_suspicious_range_usage: float = ( - self._suspicious_successive_range_count * 2 - ) / self._character_count - - return ratio_of_suspicious_range_usage - - -class SuperWeirdWordPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._word_count: int = 0 - self._bad_word_count: int = 0 - self._foreign_long_count: int = 0 - - self._is_current_word_bad: bool = False - self._foreign_long_watch: bool = False - - self._character_count: int = 0 - self._bad_character_count: int = 0 - - self._buffer: str = "" - self._buffer_accent_count: int = 0 - self._buffer_glyph_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character.isalpha(): - self._buffer += character - if is_accentuated(character): - self._buffer_accent_count += 1 - if ( - self._foreign_long_watch is False - and (is_latin(character) is False or is_accentuated(character)) - and is_cjk(character) is False - and is_hangul(character) is False - and is_katakana(character) is False - and is_hiragana(character) is False - and is_thai(character) is False - ): - self._foreign_long_watch = True - if ( - is_cjk(character) - or is_hangul(character) - or is_katakana(character) - or is_hiragana(character) - or is_thai(character) - ): - self._buffer_glyph_count += 1 - return - if not self._buffer: - return - if ( - character.isspace() or is_punctuation(character) or is_separator(character) - ) and self._buffer: - self._word_count += 1 - buffer_length: int = len(self._buffer) - - self._character_count += buffer_length - - if buffer_length >= 4: - if self._buffer_accent_count / buffer_length >= 0.5: - self._is_current_word_bad = True - # Word/Buffer ending with an upper case accentuated letter are so rare, - # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
- elif ( - is_accentuated(self._buffer[-1]) - and self._buffer[-1].isupper() - and all(_.isupper() for _ in self._buffer) is False - ): - self._foreign_long_count += 1 - self._is_current_word_bad = True - elif self._buffer_glyph_count == 1: - self._is_current_word_bad = True - self._foreign_long_count += 1 - if buffer_length >= 24 and self._foreign_long_watch: - camel_case_dst = [ - i - for c, i in zip(self._buffer, range(0, buffer_length)) - if c.isupper() - ] - probable_camel_cased: bool = False - - if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): - probable_camel_cased = True - - if not probable_camel_cased: - self._foreign_long_count += 1 - self._is_current_word_bad = True - - if self._is_current_word_bad: - self._bad_word_count += 1 - self._bad_character_count += len(self._buffer) - self._is_current_word_bad = False - - self._foreign_long_watch = False - self._buffer = "" - self._buffer_accent_count = 0 - self._buffer_glyph_count = 0 - elif ( - character not in {"<", ">", "-", "=", "~", "|", "_"} - and character.isdigit() is False - and is_symbol(character) - ): - self._is_current_word_bad = True - self._buffer += character - - def reset(self) -> None: # Abstract - self._buffer = "" - self._is_current_word_bad = False - self._foreign_long_watch = False - self._bad_word_count = 0 - self._word_count = 0 - self._character_count = 0 - self._bad_character_count = 0 - self._foreign_long_count = 0 - - @property - def ratio(self) -> float: - if self._word_count <= 10 and self._foreign_long_count == 0: - return 0.0 - - return self._bad_character_count / self._character_count - - -class CjkUncommonPlugin(MessDetectorPlugin): - """ - Detect messy CJK text that probably means nothing. - """ - - def __init__(self) -> None: - self._character_count: int = 0 - self._uncommon_count: int = 0 - - def eligible(self, character: str) -> bool: - return is_cjk(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_cjk_uncommon(character): - self._uncommon_count += 1 - return - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._uncommon_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - uncommon_form_usage: float = self._uncommon_count / self._character_count - - # we can be pretty sure it's garbage when uncommon characters are widely - # used. otherwise it could just be traditional chinese for example. 
- return uncommon_form_usage / 10 if uncommon_form_usage > 0.5 else 0.0 - - -class ArchaicUpperLowerPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._buf: bool = False - - self._character_count_since_last_sep: int = 0 - - self._successive_upper_lower_count: int = 0 - self._successive_upper_lower_count_final: int = 0 - - self._character_count: int = 0 - - self._last_alpha_seen: str | None = None - self._current_ascii_only: bool = True - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - is_concerned = character.isalpha() and is_case_variable(character) - chunk_sep = is_concerned is False - - if chunk_sep and self._character_count_since_last_sep > 0: - if ( - self._character_count_since_last_sep <= 64 - and character.isdigit() is False - and self._current_ascii_only is False - ): - self._successive_upper_lower_count_final += ( - self._successive_upper_lower_count - ) - - self._successive_upper_lower_count = 0 - self._character_count_since_last_sep = 0 - self._last_alpha_seen = None - self._buf = False - self._character_count += 1 - self._current_ascii_only = True - - return - - if self._current_ascii_only is True and character.isascii() is False: - self._current_ascii_only = False - - if self._last_alpha_seen is not None: - if (character.isupper() and self._last_alpha_seen.islower()) or ( - character.islower() and self._last_alpha_seen.isupper() - ): - if self._buf is True: - self._successive_upper_lower_count += 2 - self._buf = False - else: - self._buf = True - else: - self._buf = False - - self._character_count += 1 - self._character_count_since_last_sep += 1 - self._last_alpha_seen = character - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._character_count_since_last_sep = 0 - self._successive_upper_lower_count = 0 - self._successive_upper_lower_count_final = 0 - self._last_alpha_seen = None - self._buf = False - self._current_ascii_only = True - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return self._successive_upper_lower_count_final / self._character_count - - -class ArabicIsolatedFormPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._isolated_form_count: int = 0 - - def reset(self) -> None: # Abstract - self._character_count = 0 - self._isolated_form_count = 0 - - def eligible(self, character: str) -> bool: - return is_arabic(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_arabic_isolated_form(character): - self._isolated_form_count += 1 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - isolated_form_usage: float = self._isolated_form_count / self._character_count - - return isolated_form_usage - - -@lru_cache(maxsize=1024) -def is_suspiciously_successive_range( - unicode_range_a: str | None, unicode_range_b: str | None -) -> bool: - """ - Determine if two Unicode range seen next to each other can be considered as suspicious. - """ - if unicode_range_a is None or unicode_range_b is None: - return True - - if unicode_range_a == unicode_range_b: - return False - - if "Latin" in unicode_range_a and "Latin" in unicode_range_b: - return False - - if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: - return False - - # Latin characters can be accompanied with a combining diacritical mark - # eg. Vietnamese. 
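Combined with the keyword and CJK exceptions that follow, the range check behaves as sketched below. The expected results are worked out from the rules in this hunk rather than taken from a test run, and the import path assumes the module is reachable as `charset_normalizer.md` (the compiled extension deleted above exposes the same module):

```python
# Expected behaviour, reasoned from the rules above and below.
from charset_normalizer.md import is_suspiciously_successive_range

print(is_suspiciously_successive_range("Basic Latin", "Basic Latin"))
# False: identical ranges are never suspicious.
print(is_suspiciously_successive_range("Basic Latin", "Latin-1 Supplement"))
# False: both range names contain "Latin".
print(is_suspiciously_successive_range("Basic Latin", "Cyrillic"))
# True: unrelated alphabets side by side count as suspicious.
```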
- if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( - "Combining" in unicode_range_a or "Combining" in unicode_range_b - ): - return False - - keywords_range_a, keywords_range_b = ( - unicode_range_a.split(" "), - unicode_range_b.split(" "), - ) - - for el in keywords_range_a: - if el in UNICODE_SECONDARY_RANGE_KEYWORD: - continue - if el in keywords_range_b: - return False - - # Japanese Exception - range_a_jp_chars, range_b_jp_chars = ( - unicode_range_a - in ( - "Hiragana", - "Katakana", - ), - unicode_range_b in ("Hiragana", "Katakana"), - ) - if (range_a_jp_chars or range_b_jp_chars) and ( - "CJK" in unicode_range_a or "CJK" in unicode_range_b - ): - return False - if range_a_jp_chars and range_b_jp_chars: - return False - - if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: - if "CJK" in unicode_range_a or "CJK" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - # Chinese/Japanese use dedicated range for punctuation and/or separators. - if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( - unicode_range_a in ["Katakana", "Hiragana"] - and unicode_range_b in ["Katakana", "Hiragana"] - ): - if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: - return False - if "Forms" in unicode_range_a or "Forms" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - return True - - -@lru_cache(maxsize=2048) -def mess_ratio( - decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False -) -> float: - """ - Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. - """ - - detectors: list[MessDetectorPlugin] = [ - md_class() for md_class in MessDetectorPlugin.__subclasses__() - ] - - length: int = len(decoded_sequence) + 1 - - mean_mess_ratio: float = 0.0 - - if length < 512: - intermediary_mean_mess_ratio_calc: int = 32 - elif length <= 1024: - intermediary_mean_mess_ratio_calc = 64 - else: - intermediary_mean_mess_ratio_calc = 128 - - for character, index in zip(decoded_sequence + "\n", range(length)): - for detector in detectors: - if detector.eligible(character): - detector.feed(character) - - if ( - index > 0 and index % intermediary_mean_mess_ratio_calc == 0 - ) or index == length - 1: - mean_mess_ratio = sum(dt.ratio for dt in detectors) - - if mean_mess_ratio >= maximum_threshold: - break - - if debug: - logger = getLogger("charset_normalizer") - - logger.log( - TRACE, - "Mess-detector extended-analysis start. 
" - f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " - f"maximum_threshold={maximum_threshold}", - ) - - if len(decoded_sequence) > 16: - logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") - logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - - for dt in detectors: - logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") - - return round(mean_mess_ratio, 3) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so b/venv/lib/python3.12/site-packages/charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so deleted file mode 100755 index 40455262..00000000 Binary files a/venv/lib/python3.12/site-packages/charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/models.py b/venv/lib/python3.12/site-packages/charset_normalizer/models.py deleted file mode 100644 index 1042758f..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/models.py +++ /dev/null @@ -1,360 +0,0 @@ -from __future__ import annotations - -from encodings.aliases import aliases -from hashlib import sha256 -from json import dumps -from re import sub -from typing import Any, Iterator, List, Tuple - -from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE -from .utils import iana_name, is_multi_byte_encoding, unicode_range - - -class CharsetMatch: - def __init__( - self, - payload: bytes, - guessed_encoding: str, - mean_mess_ratio: float, - has_sig_or_bom: bool, - languages: CoherenceMatches, - decoded_payload: str | None = None, - preemptive_declaration: str | None = None, - ): - self._payload: bytes = payload - - self._encoding: str = guessed_encoding - self._mean_mess_ratio: float = mean_mess_ratio - self._languages: CoherenceMatches = languages - self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: list[str] | None = None - - self._leaves: list[CharsetMatch] = [] - self._mean_coherence_ratio: float = 0.0 - - self._output_payload: bytes | None = None - self._output_encoding: str | None = None - - self._string: str | None = decoded_payload - - self._preemptive_declaration: str | None = preemptive_declaration - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CharsetMatch): - if isinstance(other, str): - return iana_name(other) == self.encoding - return False - return self.encoding == other.encoding and self.fingerprint == other.fingerprint - - def __lt__(self, other: object) -> bool: - """ - Implemented to make sorted available upon CharsetMatches items. - """ - if not isinstance(other, CharsetMatch): - raise ValueError - - chaos_difference: float = abs(self.chaos - other.chaos) - coherence_difference: float = abs(self.coherence - other.coherence) - - # Below 1% difference --> Use Coherence - if chaos_difference < 0.01 and coherence_difference > 0.02: - return self.coherence > other.coherence - elif chaos_difference < 0.01 and coherence_difference <= 0.02: - # When having a difficult decision, use the result that decoded as many multi-byte as possible. - # preserve RAM usage! 
- if len(self._payload) >= TOO_BIG_SEQUENCE: - return self.chaos < other.chaos - return self.multi_byte_usage > other.multi_byte_usage - - return self.chaos < other.chaos - - @property - def multi_byte_usage(self) -> float: - return 1.0 - (len(str(self)) / len(self.raw)) - - def __str__(self) -> str: - # Lazy Str Loading - if self._string is None: - self._string = str(self._payload, self._encoding, "strict") - return self._string - - def __repr__(self) -> str: - return f"" - - def add_submatch(self, other: CharsetMatch) -> None: - if not isinstance(other, CharsetMatch) or other == self: - raise ValueError( - "Unable to add instance <{}> as a submatch of a CharsetMatch".format( - other.__class__ - ) - ) - - other._string = None # Unload RAM usage; dirty trick. - self._leaves.append(other) - - @property - def encoding(self) -> str: - return self._encoding - - @property - def encoding_aliases(self) -> list[str]: - """ - Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. - """ - also_known_as: list[str] = [] - for u, p in aliases.items(): - if self.encoding == u: - also_known_as.append(p) - elif self.encoding == p: - also_known_as.append(u) - return also_known_as - - @property - def bom(self) -> bool: - return self._has_sig_or_bom - - @property - def byte_order_mark(self) -> bool: - return self._has_sig_or_bom - - @property - def languages(self) -> list[str]: - """ - Return the complete list of possible languages found in decoded sequence. - Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. - """ - return [e[0] for e in self._languages] - - @property - def language(self) -> str: - """ - Most probable language found in decoded sequence. If none were detected or inferred, the property will return - "Unknown". - """ - if not self._languages: - # Trying to infer the language based on the given encoding - # Its either English or we should not pronounce ourselves in certain cases. - if "ascii" in self.could_be_from_charset: - return "English" - - # doing it there to avoid circular import - from charset_normalizer.cd import encoding_languages, mb_encoding_languages - - languages = ( - mb_encoding_languages(self.encoding) - if is_multi_byte_encoding(self.encoding) - else encoding_languages(self.encoding) - ) - - if len(languages) == 0 or "Latin Based" in languages: - return "Unknown" - - return languages[0] - - return self._languages[0][0] - - @property - def chaos(self) -> float: - return self._mean_mess_ratio - - @property - def coherence(self) -> float: - if not self._languages: - return 0.0 - return self._languages[0][1] - - @property - def percent_chaos(self) -> float: - return round(self.chaos * 100, ndigits=3) - - @property - def percent_coherence(self) -> float: - return round(self.coherence * 100, ndigits=3) - - @property - def raw(self) -> bytes: - """ - Original untouched bytes. 
- """ - return self._payload - - @property - def submatch(self) -> list[CharsetMatch]: - return self._leaves - - @property - def has_submatch(self) -> bool: - return len(self._leaves) > 0 - - @property - def alphabets(self) -> list[str]: - if self._unicode_ranges is not None: - return self._unicode_ranges - # list detected ranges - detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] - # filter and sort - self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) - return self._unicode_ranges - - @property - def could_be_from_charset(self) -> list[str]: - """ - The complete list of encoding that output the exact SAME str result and therefore could be the originating - encoding. - This list does include the encoding available in property 'encoding'. - """ - return [self._encoding] + [m.encoding for m in self._leaves] - - def output(self, encoding: str = "utf_8") -> bytes: - """ - Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. - Any errors will be simply ignored by the encoder NOT replaced. - """ - if self._output_encoding is None or self._output_encoding != encoding: - self._output_encoding = encoding - decoded_string = str(self) - if ( - self._preemptive_declaration is not None - and self._preemptive_declaration.lower() - not in ["utf-8", "utf8", "utf_8"] - ): - patched_header = sub( - RE_POSSIBLE_ENCODING_INDICATION, - lambda m: m.string[m.span()[0] : m.span()[1]].replace( - m.groups()[0], - iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] - ), - decoded_string[:8192], - count=1, - ) - - decoded_string = patched_header + decoded_string[8192:] - - self._output_payload = decoded_string.encode(encoding, "replace") - - return self._output_payload # type: ignore - - @property - def fingerprint(self) -> str: - """ - Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. - """ - return sha256(self.output()).hexdigest() - - -class CharsetMatches: - """ - Container with every CharsetMatch items ordered by default from most probable to the less one. - Act like a list(iterable) but does not implements all related methods. - """ - - def __init__(self, results: list[CharsetMatch] | None = None): - self._results: list[CharsetMatch] = sorted(results) if results else [] - - def __iter__(self) -> Iterator[CharsetMatch]: - yield from self._results - - def __getitem__(self, item: int | str) -> CharsetMatch: - """ - Retrieve a single item either by its position or encoding name (alias may be used here). - Raise KeyError upon invalid index or encoding not present in results. - """ - if isinstance(item, int): - return self._results[item] - if isinstance(item, str): - item = iana_name(item, False) - for result in self._results: - if item in result.could_be_from_charset: - return result - raise KeyError - - def __len__(self) -> int: - return len(self._results) - - def __bool__(self) -> bool: - return len(self._results) > 0 - - def append(self, item: CharsetMatch) -> None: - """ - Insert a single match. Will be inserted accordingly to preserve sort. - Can be inserted as a submatch. 
- """ - if not isinstance(item, CharsetMatch): - raise ValueError( - "Cannot append instance '{}' to CharsetMatches".format( - str(item.__class__) - ) - ) - # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) - if len(item.raw) < TOO_BIG_SEQUENCE: - for match in self._results: - if match.fingerprint == item.fingerprint and match.chaos == item.chaos: - match.add_submatch(item) - return - self._results.append(item) - self._results = sorted(self._results) - - def best(self) -> CharsetMatch | None: - """ - Simply return the first match. Strict equivalent to matches[0]. - """ - if not self._results: - return None - return self._results[0] - - def first(self) -> CharsetMatch | None: - """ - Redundant method, call the method best(). Kept for BC reasons. - """ - return self.best() - - -CoherenceMatch = Tuple[str, float] -CoherenceMatches = List[CoherenceMatch] - - -class CliDetectionResult: - def __init__( - self, - path: str, - encoding: str | None, - encoding_aliases: list[str], - alternative_encodings: list[str], - language: str, - alphabets: list[str], - has_sig_or_bom: bool, - chaos: float, - coherence: float, - unicode_path: str | None, - is_preferred: bool, - ): - self.path: str = path - self.unicode_path: str | None = unicode_path - self.encoding: str | None = encoding - self.encoding_aliases: list[str] = encoding_aliases - self.alternative_encodings: list[str] = alternative_encodings - self.language: str = language - self.alphabets: list[str] = alphabets - self.has_sig_or_bom: bool = has_sig_or_bom - self.chaos: float = chaos - self.coherence: float = coherence - self.is_preferred: bool = is_preferred - - @property - def __dict__(self) -> dict[str, Any]: # type: ignore - return { - "path": self.path, - "encoding": self.encoding, - "encoding_aliases": self.encoding_aliases, - "alternative_encodings": self.alternative_encodings, - "language": self.language, - "alphabets": self.alphabets, - "has_sig_or_bom": self.has_sig_or_bom, - "chaos": self.chaos, - "coherence": self.coherence, - "unicode_path": self.unicode_path, - "is_preferred": self.is_preferred, - } - - def to_json(self) -> str: - return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/py.typed b/venv/lib/python3.12/site-packages/charset_normalizer/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/utils.py b/venv/lib/python3.12/site-packages/charset_normalizer/utils.py deleted file mode 100644 index 6bf0384c..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/utils.py +++ /dev/null @@ -1,414 +0,0 @@ -from __future__ import annotations - -import importlib -import logging -import unicodedata -from codecs import IncrementalDecoder -from encodings.aliases import aliases -from functools import lru_cache -from re import findall -from typing import Generator - -from _multibytecodec import ( # type: ignore[import-not-found,import] - MultibyteIncrementalDecoder, -) - -from .constant import ( - ENCODING_MARKS, - IANA_SUPPORTED_SIMILAR, - RE_POSSIBLE_ENCODING_INDICATION, - UNICODE_RANGES_COMBINED, - UNICODE_SECONDARY_RANGE_KEYWORD, - UTF8_MAXIMAL_ALLOCATION, - COMMON_CJK_CHARACTERS, -) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_accentuated(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? 
- return False - return ( - "WITH GRAVE" in description - or "WITH ACUTE" in description - or "WITH CEDILLA" in description - or "WITH DIAERESIS" in description - or "WITH CIRCUMFLEX" in description - or "WITH TILDE" in description - or "WITH MACRON" in description - or "WITH RING ABOVE" in description - ) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def remove_accent(character: str) -> str: - decomposed: str = unicodedata.decomposition(character) - if not decomposed: - return character - - codes: list[str] = decomposed.split(" ") - - return chr(int(codes[0], 16)) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> str | None: - """ - Retrieve the Unicode range official name from a single character. - """ - character_ord: int = ord(character) - - for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): - if character_ord in ord_range: - return range_name - - return None - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_latin(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - return "LATIN" in description - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_punctuation(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "P" in character_category: - return True - - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Punctuation" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_symbol(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "S" in character_category or "N" in character_category: - return True - - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Forms" in character_range and character_category != "Lo" - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_emoticon(character: str) -> bool: - character_range: str | None = unicode_range(character) - - if character_range is None: - return False - - return "Emoticons" in character_range or "Pictographs" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_separator(character: str) -> bool: - if character.isspace() or character in {"|", "+", "<", ">"}: - return True - - character_category: str = unicodedata.category(character) - - return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_case_variable(character: str) -> bool: - return character.islower() != character.isupper() - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "CJK" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hiragana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "HIRAGANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_katakana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? 
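These single-character classifiers all key off `unicodedata.name()`. A few illustrative calls, assuming the module stays importable as `charset_normalizer.utils`; the results follow directly from the official Unicode character names:

```python
from charset_normalizer.utils import is_accentuated, is_cjk, is_latin

print(is_accentuated("é"))  # True: "LATIN SMALL LETTER E WITH ACUTE"
print(is_latin("д"))        # False: "CYRILLIC SMALL LETTER DE"
print(is_cjk("的"))         # True: "CJK UNIFIED IDEOGRAPH-7684"
```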
- return False - - return "KATAKANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hangul(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "HANGUL" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_thai(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "THAI" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "ARABIC" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic_isolated_form(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: # Defensive: unicode database outdated? - return False - - return "ARABIC" in character_name and "ISOLATED FORM" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk_uncommon(character: str) -> bool: - return character not in COMMON_CJK_CHARACTERS - - -@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) -def is_unicode_range_secondary(range_name: str) -> bool: - return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_unprintable(character: str) -> bool: - return ( - character.isspace() is False # includes \n \t \r \v - and character.isprintable() is False - and character != "\x1a" # Why? Its the ASCII substitute character. - and character != "\ufeff" # bug discovered in Python, - # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. - ) - - -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: - """ - Extract using ASCII-only decoder any specified encoding in the first n-bytes. - """ - if not isinstance(sequence, bytes): - raise TypeError - - seq_len: int = len(sequence) - - results: list[str] = findall( - RE_POSSIBLE_ENCODING_INDICATION, - sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), - ) - - if len(results) == 0: - return None - - for specified_encoding in results: - specified_encoding = specified_encoding.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if encoding_alias == specified_encoding: - return encoding_iana - if encoding_iana == specified_encoding: - return encoding_iana - - return None - - -@lru_cache(maxsize=128) -def is_multi_byte_encoding(name: str) -> bool: - """ - Verify is a specific encoding is a multi byte one based on it IANA name - """ - return name in { - "utf_8", - "utf_8_sig", - "utf_16", - "utf_16_be", - "utf_16_le", - "utf_32", - "utf_32_le", - "utf_32_be", - "utf_7", - } or issubclass( - importlib.import_module(f"encodings.{name}").IncrementalDecoder, - MultibyteIncrementalDecoder, - ) - - -def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: - """ - Identify and extract SIG/BOM in given sequence. 
- """ - - for iana_encoding in ENCODING_MARKS: - marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] - - if isinstance(marks, bytes): - marks = [marks] - - for mark in marks: - if sequence.startswith(mark): - return iana_encoding, mark - - return None, b"" - - -def should_strip_sig_or_bom(iana_encoding: str) -> bool: - return iana_encoding not in {"utf_16", "utf_32"} - - -def iana_name(cp_name: str, strict: bool = True) -> str: - """Returns the Python normalized encoding name (Not the IANA official name).""" - cp_name = cp_name.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if cp_name in [encoding_alias, encoding_iana]: - return encoding_iana - - if strict: - raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") - - return cp_name - - -def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: - if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): - return 0.0 - - decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder - decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder - - id_a: IncrementalDecoder = decoder_a(errors="ignore") - id_b: IncrementalDecoder = decoder_b(errors="ignore") - - character_match_count: int = 0 - - for i in range(255): - to_be_decoded: bytes = bytes([i]) - if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): - character_match_count += 1 - - return character_match_count / 254 - - -def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: - """ - Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using - the function cp_similarity. - """ - return ( - iana_name_a in IANA_SUPPORTED_SIMILAR - and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] - ) - - -def set_logging_handler( - name: str = "charset_normalizer", - level: int = logging.INFO, - format_string: str = "%(asctime)s | %(levelname)s | %(message)s", -) -> None: - logger = logging.getLogger(name) - logger.setLevel(level) - - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter(format_string)) - logger.addHandler(handler) - - -def cut_sequence_chunks( - sequences: bytes, - encoding_iana: str, - offsets: range, - chunk_size: int, - bom_or_sig_available: bool, - strip_sig_or_bom: bool, - sig_payload: bytes, - is_multi_byte_decoder: bool, - decoded_payload: str | None = None, -) -> Generator[str, None, None]: - if decoded_payload and is_multi_byte_decoder is False: - for i in offsets: - chunk = decoded_payload[i : i + chunk_size] - if not chunk: - break - yield chunk - else: - for i in offsets: - chunk_end = i + chunk_size - if chunk_end > len(sequences) + 8: - continue - - cut_sequence = sequences[i : i + chunk_size] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode( - encoding_iana, - errors="ignore" if is_multi_byte_decoder else "strict", - ) - - # multi-byte bad cutting detector and adjustment - # not the cleanest way to perform that fix but clever enough for now. 
- if is_multi_byte_decoder and i > 0: - chunk_partial_size_chk: int = min(chunk_size, 16) - - if ( - decoded_payload - and chunk[:chunk_partial_size_chk] not in decoded_payload - ): - for j in range(i, i - 4, -1): - cut_sequence = sequences[j:chunk_end] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode(encoding_iana, errors="ignore") - - if chunk[:chunk_partial_size_chk] in decoded_payload: - break - - yield chunk diff --git a/venv/lib/python3.12/site-packages/charset_normalizer/version.py b/venv/lib/python3.12/site-packages/charset_normalizer/version.py deleted file mode 100644 index e5687e3c..00000000 --- a/venv/lib/python3.12/site-packages/charset_normalizer/version.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -Expose version -""" - -from __future__ import annotations - -__version__ = "3.4.2" -VERSION = __version__.split(".") diff --git a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/METADATA deleted file mode 100644 index e6c05af4..00000000 --- a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/METADATA +++ /dev/null @@ -1,82 +0,0 @@ -Metadata-Version: 2.4 -Name: click -Version: 8.2.1 -Summary: Composable command line interface toolkit -Maintainer-email: Pallets -Requires-Python: >=3.10 -Description-Content-Type: text/markdown -License-Expression: BSD-3-Clause -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -License-File: LICENSE.txt -Requires-Dist: colorama; platform_system == 'Windows' -Project-URL: Changes, https://click.palletsprojects.com/page/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/click/ - -# $ click_ - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -## A Simple Example - -```python -import click - -@click.command() -@click.option("--count", default=1, help="Number of greetings.") -@click.option("--name", prompt="Your name", help="The person to greet.") -def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - -if __name__ == '__main__': - hello() -``` - -``` -$ python hello.py --count=3 -Your name: Click -Hello, Click! -Hello, Click! -Hello, Click! -``` - - -## Donate - -The Pallets organization develops and supports Click and other popular -packages. 
In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - -## Contributing - -See our [detailed contributing documentation][contrib] for many ways to -contribute, including reporting issues, requesting features, asking or answering -questions, and making PRs. - -[contrib]: https://palletsprojects.com/contributing/ - diff --git a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/RECORD deleted file mode 100644 index 61f4cd2b..00000000 --- a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/RECORD +++ /dev/null @@ -1,38 +0,0 @@ -click-8.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.2.1.dist-info/METADATA,sha256=dI1MbhHTLoKD2tNCCGnx9rK2gok23HDNylFeLKdLSik,2471 -click-8.2.1.dist-info/RECORD,, -click-8.2.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 -click-8.2.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473 -click/__pycache__/__init__.cpython-312.pyc,, -click/__pycache__/_compat.cpython-312.pyc,, -click/__pycache__/_termui_impl.cpython-312.pyc,, -click/__pycache__/_textwrap.cpython-312.pyc,, -click/__pycache__/_winconsole.cpython-312.pyc,, -click/__pycache__/core.cpython-312.pyc,, -click/__pycache__/decorators.cpython-312.pyc,, -click/__pycache__/exceptions.cpython-312.pyc,, -click/__pycache__/formatting.cpython-312.pyc,, -click/__pycache__/globals.cpython-312.pyc,, -click/__pycache__/parser.cpython-312.pyc,, -click/__pycache__/shell_completion.cpython-312.pyc,, -click/__pycache__/termui.cpython-312.pyc,, -click/__pycache__/testing.cpython-312.pyc,, -click/__pycache__/types.cpython-312.pyc,, -click/__pycache__/utils.cpython-312.pyc,, -click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693 -click/_termui_impl.py,sha256=ASXhLi9IQIc0Js9KQSS-3-SLZcPet3VqysBf9WgbbpI,26712 -click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400 -click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465 -click/core.py,sha256=gUhpNS9cFBGdEXXdisGVG-eRvGf49RTyFagxulqwdFw,117343 -click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461 -click/exceptions.py,sha256=1rdtXgHJ1b3OjGkN-UpXB9t_HCBihJvh_DtpmLmwn9s,9891 -click/formatting.py,sha256=Bhqx4QXdKQ9W4WKknIwj5KPKFmtduGOuGq1yw_THLZ8,9726 -click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923 -click/parser.py,sha256=nU1Ah2p11q29ul1vNdU9swPo_PUuKrxU6YXToi71q1c,18979 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=CQSGdjgun4ORbOZrXP0CVhEtPx4knsufOkRsDiK64cM,19857 -click/termui.py,sha256=vAYrKC2a7f_NfEIhAThEVYfa__ib5XQbTSCGtJlABRA,30847 -click/testing.py,sha256=2eLdAaCJCGToP5Tw-XN8JjrDb3wbJIfARxg3d0crW5M,18702 -click/types.py,sha256=KBTRxN28cR1VZ5mb9iJX98MQSw_p9SGzljqfEI8z5Tw,38389 -click/utils.py,sha256=b1Mm-usEDBHtEwcPltPIn3zSK4nw2KTp5GC7_oSTlLo,20245 diff --git a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/WHEEL deleted file mode 100644 index d8b9936d..00000000 --- a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.12.0 -Root-Is-Purelib: true -Tag: py3-none-any diff 
--git a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt deleted file mode 100644 index d12a8491..00000000 --- a/venv/lib/python3.12/site-packages/click-8.2.1.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/click/__init__.py b/venv/lib/python3.12/site-packages/click/__init__.py deleted file mode 100644 index 1aa547c5..00000000 --- a/venv/lib/python3.12/site-packages/click/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. 
-""" - -from __future__ import annotations - -from .core import Argument as Argument -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange -from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - from .core import _BaseCommand - - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - from .core import _MultiCommand - - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - if name == "OptionParser": - from .parser import _OptionParser - - warnings.warn( - "'OptionParser' is deprecated and will be removed in Click 9.0. 
The" - " old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return _OptionParser - - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Click 9.1. Use feature detection or" - " 'importlib.metadata.version(\"click\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("click") - - raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 1ca96208..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc deleted file mode 100644 index adfbf1e6..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc deleted file mode 100644 index 3bcd8cb5..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc deleted file mode 100644 index c1b92f9c..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc deleted file mode 100644 index 5a428607..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc deleted file mode 100644 index d206f9f7..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc deleted file mode 100644 index b74329e3..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index d943e5e4..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc deleted file mode 100644 index 868bfe2a..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc deleted file mode 100644 index 3459991a..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc deleted file mode 100644 index 736e3acb..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc deleted file mode 100644 index fa96bf8b..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc deleted file mode 100644 index d30afb8f..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc deleted file mode 100644 index c05dfbae..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc deleted file mode 100644 index af3a8fce..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index a0ac9816..00000000 Binary files a/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/click/_compat.py b/venv/lib/python3.12/site-packages/click/_compat.py deleted file mode 100644 index f2726b93..00000000 --- a/venv/lib/python3.12/site-packages/click/_compat.py +++ /dev/null @@ -1,622 +0,0 @@ -from __future__ import annotations - -import codecs -import collections.abc as cabc -import io -import os -import re -import sys -import typing as t -from types import TracebackType -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -WIN = sys.platform.startswith("win") -auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None -_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return 
codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO[t.Any]) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: str | None, - errors: str | None, - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). - """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write(b"") - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. 
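# A minimal usage sketch, not part of the deleted sources: the binary-stream
# discovery implemented above is exposed publicly as click.get_binary_stream().
import click

stdout_bytes = click.get_binary_stream("stdout")  # resolved through _find_binary_writer
stdout_bytes.write(b"raw bytes, no text encoding applied\n")
stdout_bytes.flush()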
- if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. - return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: str | None, errors: str | None -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO[t.Any], - encoding: str | None, - errors: str | None, - is_binary: t.Callable[[t.IO[t.Any], bool], bool], - find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
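# Sketch of the public counterpart to the rewrapping above (standard click
# API): click.get_text_stream() returns the corrected wrapper, so a stdout
# misconfigured to ascii still accepts non-ASCII text.
import click

out = click.get_text_stream("stdout")
out.write("Unicode survives a misconfigured console: déjà vu\n")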
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO[t.Any], - encoding: str | None, - errors: str | None, - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: str | os.PathLike[str] | int, - mode: str, - encoding: str | None, - errors: str | None, -) -> t.IO[t.Any]: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - atomic: bool = False, -) -> tuple[t.IO[t.Any], bool]: - binary = "b" in mode - filename = os.fspath(filename) - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). - if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
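# Illustrative use of the public wrapper around open_stream(): in
# click.open_file(), "-" selects the standard streams as shown above, and
# atomic=True takes the temporary-file path implemented just below
# ("out.txt" here is only an example name).
import click

with click.open_file("-", "w") as f:  # "-" writes to stdout
    f.write("hello\n")

with click.open_file("out.txt", "w", atomic=True) as f:  # renamed into place on close
    f.write("written atomically\n")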
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: int | None = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO[t.Any], af), True - - -class _AtomicFile: - def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> _AtomicFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.IO[t.Any] | None = None, color: bool | None = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s: str) -> int: - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write # type: ignore[method-assign] - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None - ) -> t.TextIO | None: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO[t.Any]) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.TextIO | None], - wrapper_func: t.Callable[[], t.TextIO], -) -> t.Callable[[], t.TextIO | None]: - cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.TextIO | None: - stream = src_func() - - if stream is None: - return None - - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/venv/lib/python3.12/site-packages/click/_termui_impl.py b/venv/lib/python3.12/site-packages/click/_termui_impl.py deleted file mode 100644 index 51fd9bf3..00000000 --- a/venv/lib/python3.12/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,839 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" - -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import math -import os -import shlex -import sys -import time -import typing as t -from gettext import gettext as _ -from io import StringIO -from pathlib import Path -from shutil import which -from types import TracebackType - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: cabc.Iterable[V] | None, - length: int | None = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - label: str | None = None, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.hidden = hidden - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label: str = label or "" - - if file is None: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - file = StringIO() - - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width: int = width - self.autowidth: bool = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast("cabc.Iterable[V]", range(length)) - self.iter: cabc.Iterable[V] = iter(iterable) - self.length = length - self.pos: int = 0 - self.avg: list[float] = [] - self.last_eta: float - self.start: float - self.start = self.last_eta = time.time() - self.eta_known: bool = False - self.finished: bool = False - self.max_width: int | None = None - self.entered: bool = False - self.current_item: V | None = None - self._is_atty = isatty(self.file) - self._last_line: str | None = None - - def __enter__(self) -> ProgressBar[V]: - self.entered = True - self.render_progress() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.render_finish() - - def __iter__(self) -> cabc.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.hidden or not self._is_atty: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.hidden: - return - - if not self._is_atty: - # Only output the label once if the output is not a TTY. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width and self.max_width is not None: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: V | None = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> cabc.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if not self._is_atty: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - - # Split and normalize the pager command into parts. 
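# Sketch: pager() above backs the public click.echo_via_pager() helper. The
# $PAGER environment variable, parsed below with shlex, chooses the program,
# with less/more as fallbacks.
import click

click.echo_via_pager(f"line {n}\n" for n in range(500))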
- pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False) - if pager_cmd_parts: - if WIN: - if _tempfilepager(generator, pager_cmd_parts, color): - return - elif _pipepager(generator, pager_cmd_parts, color): - return - - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if (WIN or sys.platform.startswith("os2")) and _tempfilepager( - generator, ["more"], color - ): - return - if _pipepager(generator, ["less"], color): - return - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if _pipepager(generator, ["more"], color): - return - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - cmd = cmd_parts[0] - cmd_params = cmd_parts[1:] - - cmd_filepath = which(cmd) - if not cmd_filepath: - return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() - cmd_name = cmd_path.name - - import subprocess - - # Make a local copy of the environment to not affect the global one. - env = dict(os.environ) - - # If we're piping to less and the user hasn't decided on colors, we enable - # them by default we find the -R flag in the command line arguments. - if color is None and cmd_name == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen( - [str(cmd_path)] + cmd_params, - shell=True, - stdin=subprocess.PIPE, - env=env, - errors="replace", - text=True, - ) - assert c.stdin is not None - try: - for text in generator: - if not color: - text = strip_ansi(text) - - c.stdin.write(text) - except BrokenPipeError: - # In case the pager exited unexpectedly, ignore the broken pipe error. - pass - except Exception as e: - # In case there is an exception we want to close the pager immediately - # and let the caller handle it. - # Otherwise the pager will keep running, and the user may not notice - # the error message, or worse yet it may leave the terminal in a broken state. - c.terminate() - raise e - finally: - # We must close stdin and wait for the pager to exit before we continue - try: - c.stdin.close() - # Close implies flush, so it might throw a BrokenPipeError if the pager - # process exited already. - except BrokenPipeError: - pass - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - return True - - -def _tempfilepager( - generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None -) -> bool: - """Page through text by invoking a program on a temporary file. 
- - Returns `True` if the command was found, `False` otherwise and thus another - pager should be attempted. - """ - # Split the command into the invoked CLI and its parameters. - if not cmd_parts: - return False - cmd = cmd_parts[0] - - cmd_filepath = which(cmd) - if not cmd_filepath: - return False - # Resolves symlinks and produces a normalized absolute path string. - cmd_path = Path(cmd_filepath).resolve() - - import subprocess - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - subprocess.call([str(cmd_path), filename]) - except OSError: - # Command not found - pass - finally: - os.close(fd) - os.unlink(filename) - - return True - - -def _nullpager( - stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None -) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if which(editor) is not None: - return editor - return "vi" - - def edit_files(self, filenames: cabc.Iterable[str]) -> None: - import subprocess - - editor = self.get_editor() - environ: dict[str, str] | None = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - exc_filename = " ".join(f'"{filename}"' for filename in filenames) - - try: - c = subprocess.Popen( - args=f"{editor} {exc_filename}", env=environ, shell=True - ) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - @t.overload - def edit(self, text: bytes | bytearray) -> bytes | None: ... - - # We cannot know whether or not the type expected is str or bytes when None - # is passed, so str is returned as that was what was done before. - @t.overload - def edit(self, text: str | None) -> str | None: ... - - def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None: - import tempfile - - if text is None: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. 
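# Sketch: Editor.edit() above is exposed as click.edit(). With require_save
# left at its default, an unchanged mtime (guarded by the 2-second
# adjustment described above) makes it return None instead of the edited text.
import click

text = click.edit("initial contents\n")  # opens $VISUAL or $EDITOR
if text is None:
    click.echo("no changes were saved")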
- os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. - timestamp = os.path.getmtime(name) - - self.edit_files((name,)) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url) - args = ["explorer", f"/select,{url}"] - else: - args = ["start"] - if wait: - args.append("/WAIT") - args.append("") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - elif CYGWIN: - if locate: - url = _unquote_file(url) - args = ["cygstart", os.path.dirname(url)] - else: - args = ["cygstart"] - if wait: - args.append("-w") - args.append(url) - try: - return subprocess.call(args) - except OSError: - # Command not found - return 127 - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> None: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if sys.platform == "win32": - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. 
- # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - - if echo: - func = t.cast(t.Callable[[], str], msvcrt.getwche) - else: - func = t.cast(t.Callable[[], str], msvcrt.getwch) - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import termios - import tty - - @contextlib.contextmanager - def raw_terminal() -> cabc.Iterator[int]: - f: t.TextIO | None - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/venv/lib/python3.12/site-packages/click/_textwrap.py b/venv/lib/python3.12/site-packages/click/_textwrap.py deleted file mode 100644 index 97fbee3d..00000000 --- a/venv/lib/python3.12/site-packages/click/_textwrap.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import textwrap -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: list[str], - cur_line: list[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> cabc.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/venv/lib/python3.12/site-packages/click/_winconsole.py b/venv/lib/python3.12/site-packages/click/_winconsole.py deleted file mode 100644 index e56c7c6a..00000000 --- a/venv/lib/python3.12/site-packages/click/_winconsole.py +++ /dev/null @@ -1,296 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. 
-# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. -from __future__ import annotations - -import collections.abc as cabc -import io -import sys -import time -import typing as t -from ctypes import Array -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -if t.TYPE_CHECKING: - try: - # Using `typing_extensions.Buffer` instead of `collections.abc` - # on Windows for some reason does not have `Sized` implemented. - from collections.abc import Buffer # type: ignore - except ImportError: - from typing_extensions import Buffer - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. 
- get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ # noqa: RUF012 - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]: - buf = Py_buffer() - flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - out: Array[c_char] = buffer_type.from_address(buf.buf) - return out - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle: int | None) -> None: - self.handle = handle - - def isatty(self) -> t.Literal[True]: - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self) -> t.Literal[True]: - return True - - def readinto(self, b: Buffer) -> int: - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self) -> t.Literal[True]: - return True - - @staticmethod - def _get_error_message(errno: int) -> str: - if errno == ERROR_SUCCESS: - return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b: Buffer) -> int: - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self) -> str: - return f"<ConsoleStream name={self.name!r} encoded={self.buffer!r}>" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( -
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: str | None, errors: str | None -) -> t.TextIO | None: - if ( - get_buffer is None - or encoding not in {"utf-16-le", None} - or errors not in {"strict", None} - or not _is_console(f) - ): - return None - - func = _stream_factories.get(f.fileno()) - if func is None: - return None - - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/venv/lib/python3.12/site-packages/click/core.py b/venv/lib/python3.12/site-packages/click/core.py deleted file mode 100644 index f57ada62..00000000 --- a/venv/lib/python3.12/site-packages/click/core.py +++ /dev/null @@ -1,3135 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from collections import Counter -from contextlib import AbstractContextManager -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools import repeat -from types import TracebackType - -from . import types -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import NoArgsIsHelpError -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _flag_needs_value -from .parser import _OptionParser -from .parser import _split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound="t.Callable[..., t.Any]") -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: Context, incomplete: str -) -> cabc.Iterator[tuple[str, Command]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. 
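The stream factories that close out _winconsole.py above all perform the same wrapping: a raw UTF-16-LE console reader or writer gains a buffered text layer. A minimal, portable sketch of that layering, with io.BytesIO standing in for the console handle (an assumption, so the sketch runs off Windows too):

import io

# BytesIO plays the role of the raw console stream; the layering mirrors
# _get_text_stdin above: raw bytes -> buffered reader -> UTF-16-LE text.
raw = io.BufferedReader(io.BytesIO("héllo\n".encode("utf-16-le")))
text = io.TextIOWrapper(raw, encoding="utf-16-le", errors="strict", line_buffering=True)
print(text.readline(), end="")  # héllo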
- - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(Group, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_nested_chain( - base_command: Group, cmd_name: str, cmd: Command, register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, Group): - return - - if register: - message = ( - f"It is not possible to add the group {cmd_name!r} to another" - f" group {base_command.name!r} that is in chain mode." - ) - else: - message = ( - f"Found the group {cmd_name!r} as subcommand to another group " - f" {base_command.name!r} that is in chain mode. This is not supported." - ) - - raise RuntimeError(message) - - -def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size), strict=False)) - - -@contextmanager -def augment_usage_errors( - ctx: Context, param: Parameter | None = None -) -> cabc.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: cabc.Sequence[Parameter], - declaration_order: cabc.Sequence[Parameter], -) -> list[Parameter]: - """Returns all declared parameters in the order they should be processed. - - The declared parameters are re-shuffled depending on the order in which - they were invoked, as well as the eagerness of each parameters. - - The invocation order takes precedence over the declaration order. I.e. the - order in which the user provided them to the CLI is respected. - - This behavior and its effect on callback evaluation is detailed at: - https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order - """ - - def sort_key(item: Parameter) -> tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. 
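iter_params_for_processing above reduces to a single sort key: eager parameters first, then the order in which the user actually typed them, with parameters absent from the command line last within each group. A standalone sketch of that key (the plain strings are illustrative, not Click API):

declaration_order = ["version", "verbose", "name"]
invocation_order = ["name", "verbose"]  # what the user typed, in order
eager = {"version"}                     # hypothetical is_eager parameters

def sort_key(param):
    idx = invocation_order.index(param) if param in invocation_order else float("inf")
    return (param not in eager, idx)

print(sorted(declaration_order, key=sort_key))
# ['version', 'name', 'verbose']: eager first, then invocation order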
- - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. - :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.2 - The ``protected_args`` attribute is deprecated and will be removed in - Click 9.0. ``args`` will contain remaining unparsed tokens. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. 
versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: type[HelpFormatter] = HelpFormatter - - def __init__( - self, - command: Command, - parent: Context | None = None, - info_name: str | None = None, - obj: t.Any | None = None, - auto_envvar_prefix: str | None = None, - default_map: cabc.MutableMapping[str, t.Any] | None = None, - terminal_width: int | None = None, - max_content_width: int | None = None, - resilient_parsing: bool = False, - allow_extra_args: bool | None = None, - allow_interspersed_args: bool | None = None, - ignore_unknown_options: bool | None = None, - help_option_names: list[str] | None = None, - token_normalize_func: t.Callable[[str], str] | None = None, - color: bool | None = None, - show_default: bool | None = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: dict[str, t.Any] = {} - #: the leftover arguments. - self.args: list[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self._protected_args: list[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. - if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: str | None = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: int | None = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). 
- self.max_content_width: int | None = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: list[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. - if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: str | None = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: bool | None = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: bool | None = show_default - - self._close_callbacks: list[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - @property - def protected_args(self) -> list[str]: - import warnings - - warnings.warn( - "'protected_args' is deprecated and will be removed in Click 9.0." - " 'args' will contain remaining unparsed tokens.", - DeprecationWarning, - stacklevel=2, - ) - return self._protected_args - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. 
code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> Context: - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: AbstractContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. 
When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._exit_stack.close() - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. - """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> Context: - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: type[V]) -> V | None: - """Finds the closest object of a given type.""" - node: Context | None = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def lookup_default( - self, name: str, call: t.Literal[False] = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def lookup_default(self, name: str, call: bool = True) -> t.Any | None: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. 
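with_resource(), call_on_close(), and close() above are all thin wrappers around a single contextlib.ExitStack. A minimal sketch of that pattern, using a hypothetical Ctx class in place of Click's Context:

from contextlib import ExitStack

class Ctx:
    def __init__(self):
        self._exit_stack = ExitStack()

    def with_resource(self, cm):
        return self._exit_stack.enter_context(cm)

    def call_on_close(self, f):
        return self._exit_stack.callback(f)

    def close(self):
        self._exit_stack.close()

ctx = Ctx()
f = ctx.with_resource(open(__file__))
ctx.call_on_close(lambda: print("callback ran"))
ctx.close()      # runs the callback, then closes the file (LIFO order)
print(f.closed)  # True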
- """ - if self.default_map is not None: - value = self.default_map.get(name) - - if call and callable(value): - return value() - - return value - - return None - - def fail(self, message: str) -> t.NoReturn: - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> t.NoReturn: - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> t.NoReturn: - """Exits the application with a given exit code. - - .. versionchanged:: 8.2 - Callbacks and context managers registered with :meth:`call_on_close` - and :meth:`with_resource` are closed before exiting. - """ - self.close() - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: Command) -> Context: - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> V: ... - - @t.overload - def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ... - - def invoke( - self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any - ) -> t.Any | V: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - - .. versionchanged:: 3.2 - A new context is created, and missing arguments use default values. - """ - if isinstance(callback, Command): - other_cmd = callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = self - - with augment_usage_errors(self): - with ctx: - return callback(*args, **kwargs) - - def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. 
- if not isinstance(cmd, Command): - raise TypeError("Callback is not a command.") - - for param in self.params: - if param not in kwargs: - kwargs[param] = self.params[param] - - return self.invoke(cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> ParameterSource | None: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class Command: - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - :param deprecated: If ``True`` or non-empty string, issues a message - indicating that the command is deprecated and highlights - its deprecation in --help. The message can be customized - by using a string as the value. - - .. versionchanged:: 8.2 - This is the base class for all commands, not ``BaseCommand``. - ``deprecated`` can be set to a string as well to customize the - deprecation message. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: type[Context] = Context - - #: the default for the :attr:`Context.allow_extra_args` flag. 
- allow_extra_args = False - - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: str | None, - context_settings: cabc.MutableMapping[str, t.Any] | None = None, - callback: t.Callable[..., t.Any] | None = None, - params: list[Parameter] | None = None, - help: str | None = None, - epilog: str | None = None, - short_help: str | None = None, - options_metavar: str | None = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool | str = False, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings - - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params: list[Parameter] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self._help_option = None - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - return { - "name": self.name, - "params": [param.to_info_dict() for param in self.get_params(ctx)], - "help": self.help, - "epilog": self.epilog, - "short_help": self.short_help, - "hidden": self.hidden, - "deprecated": self.deprecated, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> list[Parameter]: - params = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - params = [*params, help_option] - - if __debug__: - import warnings - - opts = [opt for param in params for opt in param.opts] - opts_counter = Counter(opts) - duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1) - - for duplicate_opt in duplicate_opts: - warnings.warn( - ( - f"The parameter {duplicate_opt} is used more than once. " - "Remove its duplicate as parameters should be unique." - ), - stacklevel=3, - ) - - return params - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. 
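The constructor above is the same machinery the @click.command decorator ultimately drives, so a Command can also be assembled by hand. A short sketch, assuming click is importable; the command and option names are illustrative:

import click

def greet(name):
    click.echo(f"hi {name}")

cmd = click.Command(
    "greet",
    params=[click.Option(["--name"], default="you")],
    callback=greet,
    help="Say hi.",
)
cmd.main(["--name", "ada"], standalone_mode=False)  # prints: hi ada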
- """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> list[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> Option | None: - """Returns the help option object. - - Skipped if :attr:`add_help_option` is ``False``. - - .. versionchanged:: 8.1.8 - The help option is now cached to avoid creating it multiple times. - """ - help_option_names = self.get_help_option_names(ctx) - - if not help_option_names or not self.add_help_option: - return None - - # Cache the help option object in private _help_option attribute to - # avoid creating it multiple times. Not doing this will break the - # callback odering by iter_params_for_processing(), which relies on - # object comparison. - if self._help_option is None: - # Avoid circular import. - from .decorators import help_option - - # Apply help_option decorator and pop resulting option - help_option(*help_option_names)(self) - self._help_option = self.params.pop() # type: ignore[assignment] - - return self._help_option - - def make_parser(self, ctx: Context) -> _OptionParser: - """Creates the underlying option parser for this command.""" - parser = _OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. - """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. 
- - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - deprecated_message = ( - f"(DEPRECATED: {self.deprecated})" - if isinstance(self.deprecated, str) - else "(DEPRECATED)" - ) - text = _("{text} {deprecated_message}").format( - text=text, deprecated_message=deprecated_message - ) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: Context | None = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. 
- """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class(self, info_name=info_name, parent=parent, **extra) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The command {name!r} is deprecated.{extra_message}" - ).format(name=self.name, extra_message=extra_message) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: list[CompletionItem] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, Group) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx._protected_args - ) - - return results - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: t.Literal[True] = True, - **extra: t.Any, - ) -> t.NoReturn: ... - - @t.overload - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: ... 
- - def main( - self, - args: cabc.Sequence[str] | None = None, - prog_name: str | None = None, - complete_var: str | None = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. - self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! 
- # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str | None = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). - """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class _FakeSubclassCheck(type): - def __subclasscheck__(cls, subclass: type) -> bool: - return issubclass(subclass, cls.__bases__[0]) - - def __instancecheck__(cls, instance: t.Any) -> bool: - return isinstance(instance, cls.__bases__[0]) - - -class _BaseCommand(Command, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Command`` instead. - """ - - -class Group(Command): - """A group is a command that nests other commands (or more groups). - - :param name: The name of the group command. - :param commands: Map names to :class:`Command` objects. Can be a list, which - will use :attr:`Command.name` as the keys. - :param invoke_without_command: Invoke the group's callback even if a - subcommand is not given. - :param no_args_is_help: If no arguments are given, show the group's help and - exit. Defaults to the opposite of ``invoke_without_command``. - :param subcommand_metavar: How to represent the subcommand argument in help. - The default will represent whether ``chain`` is set or not. - :param chain: Allow passing more than one subcommand argument. 
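As main() above shows, passing standalone_mode=False lets a ClickException propagate to the caller instead of being printed and converted into sys.exit(e.exit_code). A sketch, assuming click is importable:

import click

@click.command()
def boom():
    raise click.ClickException("nope")

try:
    boom.main([], standalone_mode=False)
except click.ClickException as e:
    print("caught:", e.message)  # caught: nope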
After parsing - a command's arguments, if any arguments remain another command will be - matched, and so on. - :param result_callback: A function to call after the group's and - subcommand's callbacks. The value returned by the subcommand is passed. - If ``chain`` is enabled, the value will be a list of values returned by - all the commands. If ``invoke_without_command`` is enabled, the value - will be the value returned by the group's callback, or an empty list if - ``chain`` is enabled. - :param kwargs: Other arguments passed to :class:`Command`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - - .. versionchanged:: 8.2 - Merged with and replaces the ``MultiCommand`` base class. - """ - - allow_extra_args = True - allow_interspersed_args = False - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: type[Command] | None = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. versionadded:: 8.0 - group_class: type[Group] | type[type] | None = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: str | None = None, - commands: cabc.MutableMapping[str, Command] - | cabc.Sequence[Command] - | None = None, - invoke_without_command: bool = False, - no_args_is_help: bool | None = None, - subcommand_metavar: str | None = None, - chain: bool = False, - result_callback: t.Callable[..., t.Any] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: cabc.MutableMapping[str, Command] = commands - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "A group in chain mode cannot have optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def add_command(self, cmd: Command, name: str | None = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_nested_chain(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'command(**kwargs)(callable)' to provide arguments." - ) - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> Group: ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Callable[..., t.Any] | None = None - - if args and callable(args[0]): - assert len(args) == 1 and not kwargs, ( - "Use 'group(**kwargs)(callable)' to provide arguments." 
- ) - (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> Group: - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - inner = old_callback(value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv # type: ignore[return-value] - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - """Given a context and a command name, this returns a :class:`Command` - object if it exists or returns ``None``. - """ - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> list[str]: - """Returns a list of subcommand names in the order they should appear.""" - return sorted(self.commands) - - def collect_usage_pieces(self, ctx: Context) -> list[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - raise NoArgsIsHelpError(ctx) - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx._protected_args = rest - ctx.args = [] - elif rest: - ctx._protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx._protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx._protected_args, *ctx.args] - ctx.args = [] - ctx._protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: list[str] - ) -> tuple[str | None, Command | None, list[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
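For orientation, a minimal sketch of the chain-mode flow implemented by `parse_args` and `invoke` above: with `chain=True` several subcommands run in a single invocation, and the result callback receives the list of their return values. The `pipeline`, `build`, and `test` names are illustrative, not part of the deleted module.

```python
import click

@click.group(chain=True)
def pipeline():
    """Run one or more steps in a single invocation."""

@pipeline.result_callback()
def collect(results):
    # In chain mode the callback receives every subcommand's return value.
    click.echo(f"ran {len(results)} step(s)")

@pipeline.command()
def build():
    return "build"

@pipeline.command()
def test():
    return "test"

if __name__ == "__main__":
    pipeline()  # e.g. `python app.py build test`
```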
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if _split_opt(cmd_name)[0]: - self.parse_args(ctx, args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class _MultiCommand(Group, metaclass=_FakeSubclassCheck): - """ - .. deprecated:: 8.2 - Will be removed in Click 9.0. Use ``Group`` instead. - """ - - -class CommandCollection(Group): - """A :class:`Group` that looks up subcommands on other groups. If a command - is not found on this group, each registered source is checked in order. - Parameters on a source are not added to this group, and a source's callback - is not invoked when invoking its commands. In other words, this "flattens" - commands in many groups into this one group. - - :param name: The name of the group command. - :param sources: A list of :class:`Group` objects to look up commands from. - :param kwargs: Other arguments passed to :class:`Group`. - - .. versionchanged:: 8.2 - This is a subclass of ``Group``. Commands are looked up first on this - group, then each of its sources. - """ - - def __init__( - self, - name: str | None = None, - sources: list[Group] | None = None, - **kwargs: t.Any, - ) -> None: - super().__init__(name, **kwargs) - #: The list of registered groups. - self.sources: list[Group] = sources or [] - - def add_source(self, group: Group) -> None: - """Add a group as a source of commands.""" - self.sources.append(group) - - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: - rv = super().get_command(ctx, cmd_name) - - if rv is not None: - return rv - - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_nested_chain(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> list[str]: - rv: set[str] = set(super().list_commands(ctx)) - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. 
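As a usage sketch for the `CommandCollection` class above (the group and command names here are illustrative): commands registered on several groups are exposed through one flat CLI, looked up in source order.

```python
import click

base = click.Group("base")
extras = click.Group("extras")

@base.command()
def init():
    click.echo("init")

@extras.command()
def lint():
    click.echo("lint")

# Both `init` and `lint` resolve through the collection.
cli = click.CommandCollection(sources=[base, extras])

if __name__ == "__main__":
    cli()
```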
Other subclasses are currently
-    not supported by design as some of the internals for parsing are
-    intentionally not finalized.
-
-    Some settings are supported by both options and arguments.
-
-    :param param_decls: the parameter declarations for this option or
-        argument. This is a list of flags or argument names.
-    :param type: the type that should be used. Either a :class:`ParamType`
-        or a Python type. The latter is converted into the former
-        automatically if supported.
-    :param required: controls if this is optional or not.
-    :param default: the default value if omitted. This can also be a callable,
-        in which case it's invoked when the default is needed
-        without any arguments.
-    :param callback: A function to further process or validate the value
-        after type conversion. It is called as ``f(ctx, param, value)``
-        and must return the value. It is called for all sources,
-        including prompts.
-    :param nargs: the number of arguments to match. If not ``1`` the return
-        value is a tuple instead of single value. The default for
-        nargs is ``1`` (except if the type is a tuple, then it's
-        the arity of the tuple). If ``nargs=-1``, all remaining
-        parameters are collected.
-    :param metavar: how the value is represented in the help page.
-    :param expose_value: if this is `True` then the value is passed onwards
-        to the command callback and stored on the context,
-        otherwise it's skipped.
-    :param is_eager: eager values are processed before non eager ones. This
-        should not be set for arguments or it will inverse the
-        order of processing.
-    :param envvar: a string or list of strings that are environment variables
-        that should be checked.
-    :param shell_complete: A function that returns custom shell
-        completions. Used instead of the param's type completion if
-        given. Takes ``ctx, param, incomplete`` and must return a list
-        of :class:`~click.shell_completion.CompletionItem` or a list of
-        strings.
-    :param deprecated: If ``True`` or non-empty string, issues a message
-        indicating that the argument is deprecated and highlights
-        its deprecation in --help. The message can be customized
-        by using a string as the value. A deprecated parameter
-        cannot be required, a ValueError will be raised otherwise.
-
-    .. versionchanged:: 8.2.0
-        Introduction of ``deprecated``.
-
-    .. versionchanged:: 8.2
-        Adding duplicate parameter names to a :class:`~click.core.Command` will
-        result in a ``UserWarning`` being shown.
-
-    .. versionchanged:: 8.0
-        ``process_value`` validates required parameters and bounded
-        ``nargs``, and invokes the parameter callback before returning
-        the value. This allows the callback to validate prompts.
-        ``full_process_value`` is removed.
-
-    .. versionchanged:: 8.0
-        ``autocompletion`` is renamed to ``shell_complete`` and has new
-        semantics described above. The old name is deprecated and will
-        be removed in 8.1, until then it will be wrapped to match the
-        new requirements.
-
-    .. versionchanged:: 8.0
-        For ``multiple=True, nargs>1``, the default must be a list of
-        tuples.
-
-    .. versionchanged:: 8.0
-        Setting a default is no longer required for ``nargs>1``, it will
-        default to ``None``. ``multiple=True`` or ``nargs=-1`` will
-        default to ``()``.
-
-    .. versionchanged:: 7.1
-        Empty environment variables are ignored rather than taking the
-        empty string value.
This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - type: types.ParamType | t.Any | None = None, - required: bool = False, - default: t.Any | t.Callable[[], t.Any] | None = None, - callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None, - nargs: int | None = None, - multiple: bool = False, - metavar: str | None = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: str | cabc.Sequence[str] | None = None, - shell_complete: t.Callable[ - [Context, Parameter, str], list[CompletionItem] | list[str] - ] - | None = None, - deprecated: bool | str = False, - ) -> None: - self.name: str | None - self.opts: list[str] - self.secondary_opts: list[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - self.deprecated = deprecated - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." - ) - - # Skip no default or callable default. - check_default = default if not callable(default) else None - - if check_default is not None: - if multiple: - try: - # Only check the first value against nargs. - check_default = next(_check_iter(check_default), None) - except TypeError: - raise ValueError( - "'default' must be a list when 'multiple' is true." - ) from None - - # Can be None for multiple with empty default. - if nargs != 1 and check_default is not None: - try: - _check_iter(check_default) - except TypeError: - if multiple: - message = ( - "'default' must be a list of lists when 'multiple' is" - " true and 'nargs' != 1." - ) - else: - message = "'default' must be a list when 'nargs' != 1." - - raise ValueError(message) from None - - if nargs > 1 and len(check_default) != nargs: - subject = "item length" if multiple else "length" - raise ValueError( - f"'default' {subject} must match nargs={nargs}." - ) - - if required and deprecated: - raise ValueError( - f"The {self.param_type_name} '{self.human_readable_name}' " - "is deprecated and still required. A deprecated " - f"{self.param_type_name} cannot be required." - ) - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. 
versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - "default": self.default, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(param=self, ctx=ctx) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is None: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, t.Any] - ) -> tuple[t.Any, ParameterSource]: - value = opts.get(self.name) # type: ignore - source = ParameterSource.COMMANDLINE - - if value is None: - value = self.value_from_envvar(ctx) - source = ParameterSource.ENVIRONMENT - - if value is None: - value = ctx.lookup_default(self.name) # type: ignore - source = ParameterSource.DEFAULT_MAP - - if value is None: - value = self.get_default(ctx) - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the option's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - return () if self.multiple or self.nargs == -1 else None - - def check_iter(value: t.Any) -> cabc.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. 
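A small sketch of the lookup order that `consume_value` implements above (command line, then environment, then `default_map`, then the possibly-callable default); the `--retries` option and `APP_RETRIES` variable are hypothetical.

```python
import click

@click.command()
@click.option("--retries", type=int, envvar="APP_RETRIES", default=lambda: 3)
def run(retries):
    # Reports which of the four sources actually supplied the value.
    source = click.get_current_context().get_parameter_source("retries")
    click.echo(f"retries={retries} (source: {source.name})")

if __name__ == "__main__":
    run()
```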
- raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...] - value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - if value is None: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - def resolve_envvar_value(self, ctx: Context) -> str | None: - if self.envvar is None: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any | None: - rv: t.Any | None = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str] - ) -> tuple[t.Any, list[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - - if ( - self.deprecated - and value is not None - and source - not in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ) - ): - extra_message = ( - f" {self.deprecated}" if isinstance(self.deprecated, str) else "" - ) - message = _( - "DeprecationWarning: The {param_type} {name!r} is deprecated." - "{extra_message}" - ).format( - param_type=self.param_type_name, - name=self.human_readable_name, - extra_message=extra_message, - ) - echo(style(message, fg="red"), err=True) - - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - pass - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. 
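To illustrate how `type_cast_value` above shapes values, a sketch combining `multiple=True` with `nargs=2` (option names are illustrative): each occurrence is converted to an inner tuple, and all occurrences are collected into an outer tuple.

```python
import click

@click.command()
@click.option("--point", "points", multiple=True, nargs=2, type=float)
def plot(points):
    # e.g. `plot --point 1 2 --point 3.5 4` -> ((1.0, 2.0), (3.5, 4.0))
    for x, y in points:
        click.echo(f"({x}, {y})")

if __name__ == "__main__":
    plot()
```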
- Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast("list[CompletionItem]", results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page and error messages. - Normally, environment variables are not shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. A deprecated option cannot be - prompted. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.2 - ``envvar`` used with ``flag_value`` will always use the ``flag_value``, - previously it would use the value of the environment variable. - - .. versionchanged:: 8.1 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
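A brief sketch of the option features documented above, using illustrative names: a prompting option and a paired on/off boolean flag whose default is shown in help.

```python
import click

@click.command()
@click.option("--name", prompt=True, help="Who to greet.")
@click.option("--shout/--no-shout", default=False, show_default=True)
def greet(name, shout):
    message = f"Hello, {name}!"
    click.echo(message.upper() if shout else message)

if __name__ == "__main__":
    greet()
```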
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: cabc.Sequence[str] | None = None, - show_default: bool | str | None = None, - prompt: bool | str = False, - confirmation_prompt: bool | str = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: bool | None = None, - flag_value: t.Any | None = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: types.ParamType | t.Any | None = None, - help: str | None = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - deprecated: bool | str = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - default_is_missing = "default" not in attrs - super().__init__( - param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs - ) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: str | None = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - if deprecated: - deprecated_message = ( - f"(DEPRECATED: {deprecated})" - if isinstance(deprecated, str) - else "(DEPRECATED)" - ) - help = help + deprecated_message if help is not None else deprecated_message - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - self.default: t.Any | t.Callable[[], t.Any] - - if is_flag and default_is_missing and not self.required: - if multiple: - self.default = () - else: - self.default = False - - if is_flag and flag_value is None: - flag_value = not self.default - - self.type: types.ParamType - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. 
- self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if deprecated and prompt: - raise ValueError("`deprecated` options cannot use `prompt`.") - - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." - ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def get_error_hint(self, ctx: Context) -> str: - result = super().get_error_hint(ctx) - if self.show_envvar: - result += f" (env var: '{self.envvar}')" - return result - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(_split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(_split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError( - f"Could not determine name for option with declarations {decls!r}" - ) - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> tuple[str, str] | None: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: cabc.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar(ctx=ctx)}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - - extra = self.get_help_extra(ctx) - extra_items = [] - if "envvars" in extra: - extra_items.append( - _("env var: {var}").format(var=", ".join(extra["envvars"])) - ) - if "default" in extra: - extra_items.append(_("default: {default}").format(default=extra["default"])) - if "range" in extra: - extra_items.append(extra["range"]) - if "required" in extra: - extra_items.append(_(extra["required"])) - - if extra_items: - extra_str = "; ".join(extra_items) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra: - extra: types.OptionHelpExtra = {} - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - if isinstance(envvar, str): - extra["envvars"] = (envvar,) - else: - extra["envvars"] = tuple(str(d) for d in envvar) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or (show_default and (default_value is not None)): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. 
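For reference, a sketch of the help-record extras that `get_help_record` and `get_help_extra` assemble above; the option and env var names are hypothetical, and the rendered help line is approximate.

```python
import click

@click.command()
@click.option("--workers", default=4, show_default=True, show_envvar=True,
              envvar="APP_WORKERS", help="Number of worker processes.")
def serve(workers):
    click.echo(workers)

# `serve --help` renders the option roughly as:
#   --workers INTEGER  Number of worker processes.
#                      [env var: APP_WORKERS; default: 4]
```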
- default_string = _split_opt( - (self.opts if default_value else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - elif default_value == "": - default_string = '""' - else: - default_string = str(default_value) - - if default_string: - extra["default"] = default_string - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra["range"] = range_str - - if self.required: - extra["required"] = "required" - - return extra - - @t.overload - def get_default( - self, ctx: Context, call: t.Literal[True] = True - ) -> t.Any | None: ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Any | t.Callable[[], t.Any] | None: ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Any | t.Callable[[], t.Any] | None: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return t.cast(Option, param).flag_value - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - # If show_default is set to True/False, provide this to `prompt` as well. 
For - # non-bool values of `show_default`, we use `prompt`'s default behavior - prompt_kwargs: t.Any = {} - if isinstance(self.show_default, bool): - prompt_kwargs["show_default"] = self.show_default - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - **prompt_kwargs, - ) - - def resolve_envvar_value(self, ctx: Context) -> str | None: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - if self.is_flag and self.flag_value: - return str(self.flag_value) - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Any | None: - rv: t.Any | None = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: cabc.Mapping[str, Parameter] - ) -> tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. - if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. 
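A minimal sketch of the `Argument` semantics described above (names illustrative): positional, required by default, with `nargs=-1` collecting any remaining values and making that argument optional.

```python
import click

@click.command()
@click.argument("src", nargs=-1)
@click.argument("dst")
def copy(src, dst):
    # Usage reads as: copy [SRC]... DST
    for path in src:
        click.echo(f"{path} -> {dst}")

if __name__ == "__main__":
    copy()
```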
- """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: cabc.Sequence[str], - required: bool | None = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self, ctx: Context) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(param=self, ctx=ctx) - if not var: - var = self.name.upper() # type: ignore - if self.deprecated: - var += "!" - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: cabc.Sequence[str], expose_value: bool - ) -> tuple[str | None, list[str], list[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Argument is marked as exposed, but does not have a name.") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}: {decls}." - ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> list[str]: - return [self.make_metavar(ctx)] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar(ctx)}'" - - def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) - - -def __getattr__(name: str) -> object: - import warnings - - if name == "BaseCommand": - warnings.warn( - "'BaseCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Command' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _BaseCommand - - if name == "MultiCommand": - warnings.warn( - "'MultiCommand' is deprecated and will be removed in Click 9.0. Use" - " 'Group' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _MultiCommand - - raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/decorators.py b/venv/lib/python3.12/site-packages/click/decorators.py deleted file mode 100644 index 21f4c342..00000000 --- a/venv/lib/python3.12/site-packages/click/decorators.py +++ /dev/null @@ -1,551 +0,0 @@ -from __future__ import annotations - -import inspect -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound="_AnyCallable | Command") - - -def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]: - """Marks a callback as wanting to receive the current context - object as first argument. 
- """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: type[T], ensure: bool = False -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - - obj: T | None - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." - ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator - - -def pass_meta_key( - key: str, *, doc_description: str | None = None -) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]: - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]: - def new_func(*args: P.args, **kwargs: P.kwargs) -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: str | None, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... 
- - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: ... - - -def command( - name: str | _AnyCallable | None = None, - cls: type[CmdType] | None = None, - **attrs: t.Any, -) -> Command | t.Callable[[_AnyCallable], Command | CmdType]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function, converted to - lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes - ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example, - ``init_data_command`` becomes ``init-data``. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: The name of the command. Defaults to modifying the function's - name as described above. - :param cls: The command class to create. Defaults to :class:`Command`. - - .. versionchanged:: 8.2 - The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are - removed when generating the name. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. - """ - - func: t.Callable[[_AnyCallable], t.Any] | None = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast("type[CmdType]", Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - if name is not None: - cmd_name = name - else: - cmd_name = f.__name__.lower().replace("_", "-") - cmd_left, sep, suffix = cmd_name.rpartition("-") - - if sep and suffix in {"command", "cmd", "group", "grp"}: - cmd_name = cmd_left - - cmd = cls(name=cmd_name, callback=f, params=params, **attrs) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) 
or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: str | None, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: str | None = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: ... - - -def group( - name: str | _AnyCallable | None = None, - cls: type[GrpType] | None = None, - **attrs: t.Any, -) -> Group | t.Callable[[_AnyCallable], Group | GrpType]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast("type[GrpType]", Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. 
If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: str | None = None, - *param_decls: str, - package_name: str | None = None, - prog_name: str | None = None, - message: str | None = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. 
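Putting the pre-built options together, a hedged sketch (the version string, prompt text, and command name are illustrative): `--version` is eager and exits early, `--yes` aborts unless confirmed, and `--password` prompts twice with hidden input.

```python
import click

@click.command()
@click.version_option("1.2.3", prog_name="demo")
@click.confirmation_option(prompt="Proceed with the reset?")
@click.password_option()
def reset(password):
    click.echo("reset done")

if __name__ == "__main__":
    reset()  # `reset --version` prints "demo, version 1.2.3" and exits
```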
- """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - import importlib.metadata - - try: - version = importlib.metadata.version(package_name) - except importlib.metadata.PackageNotFoundError: - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Pre-configured ``--help`` option which immediately prints the help page - and exits the program. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def show_help(ctx: Context, param: Parameter, value: bool) -> None: - """Callback that print the help page on ```` and exits.""" - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs.setdefault("callback", show_help) - - return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.12/site-packages/click/exceptions.py b/venv/lib/python3.12/site-packages/click/exceptions.py deleted file mode 100644 index f141a832..00000000 --- a/venv/lib/python3.12/site-packages/click/exceptions.py +++ /dev/null @@ -1,308 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .globals import resolve_color_default -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - # The context will be removed by the time we print the message, so cache - # the color settings here to be used later on (in `show`) - self.show_color: bool | None = resolve_color_default() - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=self.show_color, - ) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: Context | None = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: Command | None = self.ctx.command if self.ctx else None - - def show(self, file: t.IO[t.Any] | None = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. 
This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: str | None = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: str | None = None, - ctx: Context | None = None, - param: Parameter | None = None, - param_hint: str | None = None, - param_type: str | None = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: str | None = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message( - param=self.param, ctx=self.ctx - ) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
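BadParameter and MissingParameter above gain their contextual "Invalid value for ..." rendering when raised from a parameter callback; a sketch under that assumption (names hypothetical):

import click

def check_port(ctx: click.Context, param: click.Parameter, value: int) -> int:
    if not 0 < value < 65536:
        # Shown as "Error: Invalid value for '--port': ..." plus usage text.
        raise click.BadParameter("must be between 1 and 65535")
    return value

@click.command()
@click.option("--port", type=int, default=8080, callback=check_port)
def serve(port: int) -> None:
    click.echo(f"listening on {port}")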
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: str | None = None, - possibilities: cabc.Sequence[str] | None = None, - ctx: Context | None = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: Context | None = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class NoArgsIsHelpError(UsageError): - def __init__(self, ctx: Context) -> None: - self.ctx: Context - super().__init__(ctx.get_help(), ctx=ctx) - - def show(self, file: t.IO[t.Any] | None = None) -> None: - echo(self.format_message(), file=file, err=True, color=self.ctx.color) - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: str | None = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/venv/lib/python3.12/site-packages/click/formatting.py b/venv/lib/python3.12/site-packages/click/formatting.py deleted file mode 100644 index 9891f880..00000000 --- a/venv/lib/python3.12/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import _split_opt - -# Can force a width. 
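The exception hierarchy deleted above maps onto exit codes: 1 for ClickException, 2 for UsageError and its subclasses such as NoSuchOption. One way to observe this, reusing the hypothetical serve command from the previous sketch:

from click.testing import CliRunner

runner = CliRunner()
assert runner.invoke(serve, ["--bogus"]).exit_code == 2          # NoSuchOption
assert runner.invoke(serve, ["--port", "70000"]).exit_code == 2  # BadParameter
assert runner.invoke(serve, []).exit_code == 0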
This is used by the test system -FORCED_WIDTH: int | None = None - - -def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]: - widths: dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: cabc.Iterable[tuple[str, str]], col_count: int -) -> cabc.Iterator[tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: list[tuple[int, bool, str]] = [] - buf: list[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. 
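wrap_text above is re-exported as click.wrap_text; a quick sketch of the indent parameters:

import click

text = "Click rewraps help text to the requested width and keeps paragraphs intact."
print(click.wrap_text(text, width=40, initial_indent="  ", subsequent_indent="  "))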
- """ - - def __init__( - self, - indent_increment: int = 2, - width: int | None = None, - max_width: int | None = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent: int = 0 - self.buffer: list[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. - """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: cabc.Sequence[tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. 
- """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> cabc.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> cabc.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = _split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.12/site-packages/click/globals.py b/venv/lib/python3.12/site-packages/click/globals.py deleted file mode 100644 index a2f91723..00000000 --- a/venv/lib/python3.12/site-packages/click/globals.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -import typing as t -from threading import local - -if t.TYPE_CHECKING: - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: t.Literal[False] = False) -> Context: ... - - -@t.overload -def get_current_context(silent: bool = ...) -> Context | None: ... - - -def get_current_context(silent: bool = False) -> Context | None: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: Context) -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: bool | None = None) -> bool | None: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/venv/lib/python3.12/site-packages/click/parser.py b/venv/lib/python3.12/site-packages/click/parser.py deleted file mode 100644 index a8b7d263..00000000 --- a/venv/lib/python3.12/site-packages/click/parser.py +++ /dev/null @@ -1,532 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" - -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -from __future__ import annotations - -import collections.abc as cabc -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int] -) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. 
- """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: list[str | tuple[str | None, ...] | None] = [] - spos: int | None = None - - def _fetch(c: deque[V]) -> V | None: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def _split_opt(opt: str) -> tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def _normalize_opt(opt: str, ctx: Context | None) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = _split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -class _Option: - def __init__( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: set[str] = set() - - for opt in opts: - prefix, value = _split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: _ParsingState) -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class _Argument: - def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: str | cabc.Sequence[str | None] | None, - state: _ParsingState, - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # Replace empty tuple with None so that a value from the - # environment 
may be tried. - value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class _ParsingState: - def __init__(self, rargs: list[str]) -> None: - self.opts: dict[str, t.Any] = {} - self.largs: list[str] = [] - self.rargs = rargs - self.order: list[CoreParameter] = [] - - -class _OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - - .. deprecated:: 8.2 - Will be removed in Click 9.0. - """ - - def __init__(self, ctx: Context | None = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: dict[str, _Option] = {} - self._long_opt: dict[str, _Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: list[_Argument] = [] - - def add_option( - self, - obj: CoreOption, - opts: cabc.Sequence[str], - dest: str | None, - action: str | None = None, - nargs: int = 1, - const: t.Any | None = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [_normalize_opt(opt, self.ctx) for opt in opts] - option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(_Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: list[str] - ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. 
If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = _ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: _ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: _ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: str | None, state: _ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: _ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = _normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. 
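The allow_interspersed_args and ignore_unknown_options switches documented above surface through a command's context_settings; a sketch of a pass-through wrapper under that assumption:

import click

@click.command(context_settings={
    "ignore_unknown_options": True,  # shift unrecognized options into ctx.args
    "allow_extra_args": True,
})
@click.pass_context
def wrapper(ctx: click.Context) -> None:
    click.echo(f"forwarding: {ctx.args}")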
- if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: _Option, state: _ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: _ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = _normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) - - -def __getattr__(name: str) -> object: - import warnings - - if name in { - "OptionParser", - "Argument", - "Option", - "split_opt", - "normalize_opt", - "ParsingState", - }: - warnings.warn( - f"'parser.{name}' is deprecated and will be removed in Click 9.0." 
- " The old parser is available in 'optparse'.", - DeprecationWarning, - stacklevel=2, - ) - return globals()[f"_{name}"] - - if name == "split_arg_string": - from .shell_completion import split_arg_string - - warnings.warn( - "Importing 'parser.split_arg_string' is deprecated, it will only be" - " available in 'shell_completion' in Click 9.0.", - DeprecationWarning, - stacklevel=2, - ) - return split_arg_string - - raise AttributeError(name) diff --git a/venv/lib/python3.12/site-packages/click/py.typed b/venv/lib/python3.12/site-packages/click/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/click/shell_completion.py b/venv/lib/python3.12/site-packages/click/shell_completion.py deleted file mode 100644 index 6c39d5eb..00000000 --- a/venv/lib/python3.12/site-packages/click/shell_completion.py +++ /dev/null @@ -1,644 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .utils import echo - - -def shell_complete( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: str | None = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: str | None = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. 
- """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> tuple[list[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import shutil - import subprocess - - bash_exe = shutil.which("bash") - - if bash_exe is None: - match = None - else: - output = subprocess.run( - [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], - stdout=subprocess.PIPE, - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> tuple[list[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]") - - -_available_shells: dict[str, type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: str | None = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> type[ShellComplete] | None: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def split_arg_string(string: str) -> list[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - - .. versionchanged:: 8.2 - Moved to ``shell_completion`` from ``parser``. 
- """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. - value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: Command, - ctx_args: cabc.MutableMapping[str, t.Any], - prog_name: str, - args: list[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx: - args = ctx._protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, Group): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, args, parent=ctx, resilient_parsing=True - ) as sub_ctx: - ctx = sub_ctx - args = ctx._protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - with cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) as sub_sub_ctx: - sub_ctx = sub_sub_ctx - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx._protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: list[str], incomplete: str -) -> tuple[Command | Parameter, str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. 
- - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/venv/lib/python3.12/site-packages/click/termui.py b/venv/lib/python3.12/site-packages/click/termui.py deleted file mode 100644 index dcbb2221..00000000 --- a/venv/lib/python3.12/site-packages/click/termui.py +++ /dev/null @@ -1,877 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import inspect -import io -import itertools -import sys -import typing as t -from contextlib import AbstractContextManager -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. 
-visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Any | None = None, - show_choices: bool = True, - type: ParamType | None = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Any | None = None, - hide_input: bool = False, - confirmation_prompt: bool | str = False, - type: ParamType | t.Any | None = None, - value_proc: t.Callable[[str], t.Any] | None = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. 
- return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: bool | None = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str, - color: bool | None = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. 
- :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast("cabc.Iterable[str]", text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -@t.overload -def progressbar( - *, - length: int, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[int]: ... - - -@t.overload -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: ... - - -def progressbar( - iterable: cabc.Iterable[V] | None = None, - length: int | None = None, - label: str | None = None, - hidden: bool = False, - show_eta: bool = True, - show_percent: bool | None = None, - show_pos: bool = False, - item_show_func: t.Callable[[V | None], str | None] | None = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.TextIO | None = None, - color: bool | None = None, - update_min_steps: int = 1, -) -> ProgressBar[V]: - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. 
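The pager helper above in a short sketch (Click 8.x assumed; the generator is illustrative). Passing a generator function lets output stream into the pager lazily:

```python
import click

def lots_of_lines():
    # A generator function is only invoked when the pager starts reading,
    # so nothing is rendered up front.
    for i in range(1, 1001):
        yield f"line {i}\n"

# Pages through stdout using the platform pager (e.g. $PAGER / less).
click.echo_via_pager(lots_of_lines)
```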
- - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param hidden: hide the progressbar. Defaults to ``False``. When no tty is - detected, it will only print the progressbar label. Setting this to - ``False`` also disables that. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionadded:: 8.2 - The ``hidden`` argument. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. 
Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - The ``update_min_steps`` parameter. - - .. versionadded:: 4.0 - The ``color`` parameter and ``update`` method. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - hidden=hidden, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: int | tuple[int, int, int] | str | None = None, - bg: int | tuple[int, int, int] | str | None = None, - bold: bool | None = None, - dim: bool | None = None, - underline: bool | None = None, - overline: bool | None = None, - italic: bool | None = None, - blink: bool | None = None, - reverse: bool | None = None, - strikethrough: bool | None = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. - - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. 
- :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Any | None = None, - file: t.IO[t.AnyStr] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. 
versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -@t.overload -def edit( - text: bytes | bytearray, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = False, - extension: str = ".txt", -) -> bytes | None: ... - - -@t.overload -def edit( - text: str, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", -) -> str | None: ... - - -@t.overload -def edit( - text: None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> None: ... - - -def edit( - text: str | bytes | bytearray | None = None, - editor: str | None = None, - env: cabc.Mapping[str, str] | None = None, - require_save: bool = True, - extension: str = ".txt", - filename: str | cabc.Iterable[str] | None = None, -) -> str | bytes | bytearray | None: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. If the editor supports - editing multiple files at once, a sequence of files may be - passed as well. Invoke `click.file` once per file instead - if multiple files cannot be managed at once or editing the - files serially is desired. - - .. versionchanged:: 8.2.0 - ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str`` - if the ``editor`` supports editing multiple files at once. - - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - if isinstance(filename, str): - filename = (filename,) - - ed.edit_files(filenames=filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. 
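The styling helpers above (`style`, `secho`, `unstyle`) in one minimal sketch, assuming Click 8.x and a color-capable terminal:

```python
import click

# secho() is style() + echo() in a single call.
click.secho("All checks passed", fg="green", bold=True)
click.secho("Low disk space", fg="yellow", err=True)

# 256-color integers and RGB tuples are accepted where the terminal supports them.
click.echo(click.style("fancy", fg=(255, 12, 128), bg=117))

# unstyle() strips the ANSI codes again, e.g. before writing to a log file.
plain = click.unstyle(click.style("hello", fg="red"))
assert plain == "hello"
```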
- - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar: t.Callable[[bool], str] | None = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> AbstractContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: str | None = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv/lib/python3.12/site-packages/click/testing.py b/venv/lib/python3.12/site-packages/click/testing.py deleted file mode 100644 index 7c0e8741..00000000 --- a/venv/lib/python3.12/site-packages/click/testing.py +++ /dev/null @@ -1,565 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import _compat -from . import formatting -from . import termui -from . 
import utils
-from ._compat import _find_binary_reader
-
-if t.TYPE_CHECKING:
-    from _typeshed import ReadableBuffer
-
-    from .core import Command
-
-
-class EchoingStdin:
-    def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
-        self._input = input
-        self._output = output
-        self._paused = False
-
-    def __getattr__(self, x: str) -> t.Any:
-        return getattr(self._input, x)
-
-    def _echo(self, rv: bytes) -> bytes:
-        if not self._paused:
-            self._output.write(rv)
-
-        return rv
-
-    def read(self, n: int = -1) -> bytes:
-        return self._echo(self._input.read(n))
-
-    def read1(self, n: int = -1) -> bytes:
-        return self._echo(self._input.read1(n))  # type: ignore
-
-    def readline(self, n: int = -1) -> bytes:
-        return self._echo(self._input.readline(n))
-
-    def readlines(self) -> list[bytes]:
-        return [self._echo(x) for x in self._input.readlines()]
-
-    def __iter__(self) -> cabc.Iterator[bytes]:
-        return iter(self._echo(x) for x in self._input)
-
-    def __repr__(self) -> str:
-        return repr(self._input)
-
-
-@contextlib.contextmanager
-def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
-    if stream is None:
-        yield
-    else:
-        stream._paused = True
-        yield
-        stream._paused = False
-
-
-class BytesIOCopy(io.BytesIO):
-    """Patch ``io.BytesIO`` to let the written stream be copied to another.
-
-    .. versionadded:: 8.2
-    """
-
-    def __init__(self, copy_to: io.BytesIO) -> None:
-        super().__init__()
-        self.copy_to = copy_to
-
-    def flush(self) -> None:
-        super().flush()
-        self.copy_to.flush()
-
-    def write(self, b: ReadableBuffer) -> int:
-        self.copy_to.write(b)
-        return super().write(b)
-
-
-class StreamMixer:
-    """Mixes `<stdout>` and `<stderr>` streams.
-
-    The result is available in the ``output`` attribute.
-
-    .. versionadded:: 8.2
-    """
-
-    def __init__(self) -> None:
-        self.output: io.BytesIO = io.BytesIO()
-        self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
-        self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
-
-
-class _NamedTextIOWrapper(io.TextIOWrapper):
-    def __init__(
-        self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
-    ) -> None:
-        super().__init__(buffer, **kwargs)
-        self._name = name
-        self._mode = mode
-
-    @property
-    def name(self) -> str:
-        return self._name
-
-    @property
-    def mode(self) -> str:
-        return self._mode
-
-    def __next__(self) -> str:  # type: ignore
-        try:
-            line = super().__next__()
-        except StopIteration as e:
-            raise EOFError() from e
-        return line
-
-
-def make_input_stream(
-    input: str | bytes | t.IO[t.Any] | None, charset: str
-) -> t.BinaryIO:
-    # Is already an input stream.
-    if hasattr(input, "read"):
-        rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
-
-        if rv is not None:
-            return rv
-
-        raise TypeError("Could not find binary reader for input stream.")
-
-    if input is None:
-        input = b""
-    elif isinstance(input, str):
-        input = input.encode(charset)
-
-    return io.BytesIO(input)
-
-
-class Result:
-    """Holds the captured result of an invoked CLI script.
-
-    :param runner: The runner that created the result.
-    :param stdout_bytes: The standard output as bytes.
-    :param stderr_bytes: The standard error as bytes.
-    :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
-        user would see it in their terminal.
-    :param return_value: The value returned from the invoked command.
-    :param exit_code: The exit code as integer.
-    :param exception: The exception that happened if one did.
-    :param exc_info: Exception information (exception type, exception instance,
-        traceback type).
-
-    .. versionchanged:: 8.2
-        ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
-        ``mix_stderr`` has been removed.
-
-    .. versionadded:: 8.0
-        Added ``return_value``.
-    """
-
-    def __init__(
-        self,
-        runner: CliRunner,
-        stdout_bytes: bytes,
-        stderr_bytes: bytes,
-        output_bytes: bytes,
-        return_value: t.Any,
-        exit_code: int,
-        exception: BaseException | None,
-        exc_info: tuple[type[BaseException], BaseException, TracebackType]
-        | None = None,
-    ):
-        self.runner = runner
-        self.stdout_bytes = stdout_bytes
-        self.stderr_bytes = stderr_bytes
-        self.output_bytes = output_bytes
-        self.return_value = return_value
-        self.exit_code = exit_code
-        self.exception = exception
-        self.exc_info = exc_info
-
-    @property
-    def output(self) -> str:
-        """The terminal output as unicode string, as the user would see it.
-
-        .. versionchanged:: 8.2
-            No longer a proxy for ``self.stdout``. Now has its own independent stream
-            that is mixing `<stdout>` and `<stderr>`, in the order they were written.
-        """
-        return self.output_bytes.decode(self.runner.charset, "replace").replace(
-            "\r\n", "\n"
-        )
-
-    @property
-    def stdout(self) -> str:
-        """The standard output as unicode string."""
-        return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
-            "\r\n", "\n"
-        )
-
-    @property
-    def stderr(self) -> str:
-        """The standard error as unicode string.
-
-        .. versionchanged:: 8.2
-            No longer raise an exception, always returns the `<stderr>` string.
-        """
-        return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
-            "\r\n", "\n"
-        )
-
-    def __repr__(self) -> str:
-        exc_str = repr(self.exception) if self.exception else "okay"
-        return f"<{type(self).__name__} {exc_str}>"
-
-
-class CliRunner:
-    """The CLI runner provides functionality to invoke a Click command line
-    script for unittesting purposes in an isolated environment. This only
-    works in single-threaded systems without any concurrency as it changes the
-    global interpreter state.
-
-    :param charset: the character set for the input and output data.
-    :param env: a dictionary with environment variables for overriding.
-    :param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes
-        to `<stdout>`. This is useful for showing examples in
-        some circumstances. Note that regular prompts
-        will automatically echo the input.
-    :param catch_exceptions: Whether to catch any exceptions other than
-        ``SystemExit`` when running :meth:`~CliRunner.invoke`.
-
-    .. versionchanged:: 8.2
-        Added the ``catch_exceptions`` parameter.
-
-    .. versionchanged:: 8.2
-        ``mix_stderr`` parameter has been removed.
-    """
-
-    def __init__(
-        self,
-        charset: str = "utf-8",
-        env: cabc.Mapping[str, str | None] | None = None,
-        echo_stdin: bool = False,
-        catch_exceptions: bool = True,
-    ) -> None:
-        self.charset = charset
-        self.env: cabc.Mapping[str, str | None] = env or {}
-        self.echo_stdin = echo_stdin
-        self.catch_exceptions = catch_exceptions
-
-    def get_default_prog_name(self, cli: Command) -> str:
-        """Given a command object it will return the default program name
-        for it. The default is the `name` attribute or ``"root"`` if not
-        set.
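Before the runner internals continue below, a brief sketch of configuring the runner described above (Click 8.2 assumed; the env key is illustrative):

```python
from click.testing import CliRunner

# env overrides are applied for the duration of invoke()/isolation();
# echo_stdin=True mirrors provided input into the captured output,
# the way a real terminal echoes keystrokes.
runner = CliRunner(env={"MY_APP_DEBUG": "1"}, echo_stdin=True)
```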
- """ - return cli.name or "root" - - def make_env( - self, overrides: cabc.Mapping[str, str | None] | None = None - ) -> cabc.Mapping[str, str | None]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: str | bytes | t.IO[t.Any] | None = None, - env: cabc.Mapping[str, str | None] | None = None, - color: bool = False, - ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up `` with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into `sys.stdin`. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - An additional output stream is returned, which is a mix of - `` and `` streams. - - .. versionchanged:: 8.2 - Always returns the `` stream. - - .. versionchanged:: 8.0 - `` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - stream_mixer = StreamMixer() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. 
-            text_input._CHUNK_SIZE = 1  # type: ignore
-
-        sys.stdout = _NamedTextIOWrapper(
-            stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
-        )
-
-        sys.stderr = _NamedTextIOWrapper(
-            stream_mixer.stderr,
-            encoding=self.charset,
-            name="<stderr>",
-            mode="w",
-            errors="backslashreplace",
-        )
-
-        @_pause_echo(echo_input)  # type: ignore
-        def visible_input(prompt: str | None = None) -> str:
-            sys.stdout.write(prompt or "")
-            val = next(text_input).rstrip("\r\n")
-            sys.stdout.write(f"{val}\n")
-            sys.stdout.flush()
-            return val
-
-        @_pause_echo(echo_input)  # type: ignore
-        def hidden_input(prompt: str | None = None) -> str:
-            sys.stdout.write(f"{prompt or ''}\n")
-            sys.stdout.flush()
-            return next(text_input).rstrip("\r\n")
-
-        @_pause_echo(echo_input)  # type: ignore
-        def _getchar(echo: bool) -> str:
-            char = sys.stdin.read(1)
-
-            if echo:
-                sys.stdout.write(char)
-
-            sys.stdout.flush()
-            return char
-
-        default_color = color
-
-        def should_strip_ansi(
-            stream: t.IO[t.Any] | None = None, color: bool | None = None
-        ) -> bool:
-            if color is None:
-                return not default_color
-            return not color
-
-        old_visible_prompt_func = termui.visible_prompt_func
-        old_hidden_prompt_func = termui.hidden_prompt_func
-        old__getchar_func = termui._getchar
-        old_should_strip_ansi = utils.should_strip_ansi  # type: ignore
-        old__compat_should_strip_ansi = _compat.should_strip_ansi
-        termui.visible_prompt_func = visible_input
-        termui.hidden_prompt_func = hidden_input
-        termui._getchar = _getchar
-        utils.should_strip_ansi = should_strip_ansi  # type: ignore
-        _compat.should_strip_ansi = should_strip_ansi
-
-        old_env = {}
-        try:
-            for key, value in env.items():
-                old_env[key] = os.environ.get(key)
-                if value is None:
-                    try:
-                        del os.environ[key]
-                    except Exception:
-                        pass
-                else:
-                    os.environ[key] = value
-            yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
-        finally:
-            for key, value in old_env.items():
-                if value is None:
-                    try:
-                        del os.environ[key]
-                    except Exception:
-                        pass
-                else:
-                    os.environ[key] = value
-            sys.stdout = old_stdout
-            sys.stderr = old_stderr
-            sys.stdin = old_stdin
-            termui.visible_prompt_func = old_visible_prompt_func
-            termui.hidden_prompt_func = old_hidden_prompt_func
-            termui._getchar = old__getchar_func
-            utils.should_strip_ansi = old_should_strip_ansi  # type: ignore
-            _compat.should_strip_ansi = old__compat_should_strip_ansi
-            formatting.FORCED_WIDTH = old_forced_width
-
-    def invoke(
-        self,
-        cli: Command,
-        args: str | cabc.Sequence[str] | None = None,
-        input: str | bytes | t.IO[t.Any] | None = None,
-        env: cabc.Mapping[str, str | None] | None = None,
-        catch_exceptions: bool | None = None,
-        color: bool = False,
-        **extra: t.Any,
-    ) -> Result:
-        """Invokes a command in an isolated environment. The arguments are
-        forwarded directly to the command line script, the `extra` keyword
-        arguments are passed to the :meth:`~clickpkg.Command.main` function of
-        the command.
-
-        This returns a :class:`Result` object.
-
-        :param cli: the command to invoke
-        :param args: the arguments to invoke. It may be given as an iterable
-            or a string. When given as string it will be interpreted
-            as a Unix shell command. More details at
-            :func:`shlex.split`.
-        :param input: the input data for `sys.stdin`.
-        :param env: the environment overrides.
-        :param catch_exceptions: Whether to catch any other exceptions than
-            ``SystemExit``. If :data:`None`, the value
-            from :class:`CliRunner` is used.
-        :param extra: the keyword arguments to pass to :meth:`main`.
- :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionadded:: 8.2 - The result object has the ``output_bytes`` attribute with - the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would - see it in its terminal. - - .. versionchanged:: 8.2 - The result object always returns the ``stderr_bytes`` stream. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - if catch_exceptions is None: - catch_exceptions = self.catch_exceptions - - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: BaseException | None = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast("int | t.Any | None", e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - sys.stderr.flush() - stdout = outstreams[0].getvalue() - stderr = outstreams[1].getvalue() - output = outstreams[2].getvalue() - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - output_bytes=output, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: str | os.PathLike[str] | None = None - ) -> cabc.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
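The `invoke` and `isolated_filesystem` helpers documented above, in a short test-style sketch (Click 8.2 assumed; the `hello` command is invented):

```python
import click
from click.testing import CliRunner

@click.command()
@click.argument("name")
def hello(name):
    click.echo(f"Hello {name}!")

runner = CliRunner()
result = runner.invoke(hello, ["World"])
assert result.exit_code == 0
# output interleaves stdout and stderr in write order; result.stdout and
# result.stderr expose the individual streams.
assert result.output == "Hello World!\n"

with runner.isolated_filesystem():
    # CWD is now a fresh temp dir, removed again on exit.
    with open("scratch.txt", "w") as f:
        f.write("temporary test data")
```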
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(dt) - except OSError: - pass diff --git a/venv/lib/python3.12/site-packages/click/types.py b/venv/lib/python3.12/site-packages/click/types.py deleted file mode 100644 index 684cb3b1..00000000 --- a/venv/lib/python3.12/site-packages/click/types.py +++ /dev/null @@ -1,1165 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import enum -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - -ParamTypeValue = t.TypeVar("ParamTypeValue") - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[str | None] = None - - def to_info_dict(self) -> dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None: - """Optionally might return extra information about a missing - parameter. - - .. 
versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> cabc.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: Parameter | None = None, - ctx: Context | None = None, - ) -> t.NoReturn: - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType, t.Generic[ParamTypeValue]): - """The choice type allows a value to be checked against a fixed set - of supported values. - - You may pass any iterable value which will be converted to a tuple - and thus will only be iterated once. - - The resulting value will always be one of the originally passed choices. - See :meth:`normalize_choice` for more info on the mapping of strings - to choices. See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - - .. versionchanged:: 8.2.0 - Non-``str`` ``choices`` are now supported. It can additionally be any - iterable. Before you were not recommended to pass anything but a list or - tuple. - - .. versionadded:: 8.2.0 - Choice normalization can be overridden via :meth:`normalize_choice`. - """ - - name = "choice" - - def __init__( - self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True - ) -> None: - self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices) - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def _normalized_mapping( - self, ctx: Context | None = None - ) -> cabc.Mapping[ParamTypeValue, str]: - """ - Returns mapping where keys are the original choices and the values are - the normalized values that are accepted via the command line. - - This is a simple wrapper around :meth:`normalize_choice`, use that - instead which is supported. - """ - return { - choice: self.normalize_choice( - choice=choice, - ctx=ctx, - ) - for choice in self.choices - } - - def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str: - """ - Normalize a choice value, used to map a passed string to a choice. - Each choice must have a unique normalized value. 
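Before the `Choice` internals continue, the `ParamType` contract documented above (set `name`, have `convert` accept already-converted values, call `fail` on bad input) in a minimal custom-type sketch; `CommaList` is invented for illustration and assumes Click 8.x:

```python
import click

class CommaList(click.ParamType):
    # `name` feeds metavars and error messages.
    name = "comma_list"

    def convert(self, value, param, ctx):
        if isinstance(value, list):  # already the correct type
            return value
        parts = [p.strip() for p in str(value).split(",")]
        if not all(parts):
            self.fail(f"{value!r} contains an empty item", param, ctx)
        return parts

@click.command()
@click.option("--tags", type=CommaList(), default="alpha,beta")
def main(tags):
    click.echo(tags)

if __name__ == "__main__":
    main()
```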
- - By default uses :meth:`Context.token_normalize_func` and if not case - sensitive, convert it to a casefolded value. - - .. versionadded:: 8.2.0 - """ - normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice) - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(normed_value) - - if not self.case_sensitive: - normed_value = normed_value.casefold() - - return normed_value - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - if param.param_type_name == "option" and not param.show_choices: # type: ignore - choice_metavars = [ - convert_type(type(choice)).name.upper() for choice in self.choices - ] - choices_str = "|".join([*dict.fromkeys(choice_metavars)]) - else: - choices_str = "|".join( - [str(i) for i in self._normalized_mapping(ctx=ctx).values()] - ) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. - return f"[{choices_str}]" - - def get_missing_message(self, param: Parameter, ctx: Context | None) -> str: - """ - Message shown when no choice is passed. - - .. versionchanged:: 8.2.0 Added ``ctx`` argument. - """ - return _("Choose from:\n\t{choices}").format( - choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values()) - ) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> ParamTypeValue: - """ - For a given value from the parser, normalize it and find its - matching normalized value in the list of choices. Then return the - matched "original" choice. - """ - normed_value = self.normalize_choice(choice=value, ctx=ctx) - normalized_mapping = self._normalized_mapping(ctx=ctx) - - try: - return next( - original - for original, normalized in normalized_mapping.items() - if normalized == normed_value - ) - except StopIteration: - self.fail( - self.get_invalid_choice_message(value=value, ctx=ctx), - param=param, - ctx=ctx, - ) - - def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str: - """Get the error message when the given choice is invalid. - - :param value: The invalid value. - - .. versionadded:: 8.2 - """ - choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values())) - return ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. 
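A compact sketch of the `Choice` semantics above (including the enum normalization) alongside the `DateTime` type introduced here, assuming Click 8.2; the enum and option names are illustrative:

```python
import enum
import click

class Color(enum.Enum):
    RED = "red"
    GREEN = "green"

@click.command()
@click.option("--color", type=click.Choice(Color, case_sensitive=False))
@click.option("--since", type=click.DateTime(formats=["%Y-%m-%d"]))
def report(color, since):
    # Choice hands back the original choice (here: the enum member);
    # DateTime returns a datetime.datetime parsed with the first
    # matching format.
    click.echo(f"{color} since {since}")

if __name__ == "__main__":
    report()
```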
- - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. - """ - - name = "datetime" - - def __init__(self, formats: cabc.Sequence[str] | None = None): - self.formats: cabc.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: Parameter, ctx: Context) -> str | None: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[type[t.Any]] - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, 
range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. - """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: t.Literal[1, -1], open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: float | None = None, - max: float | None = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float: - if not open: - return bound - - # Could use math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Files can also be opened atomically in which case all writes go into a - separate file in the same folder and upon completion the file will - be moved over to the original location. This is useful if a file - regularly read by other users is modified. - - See :ref:`file-args` for more information. - - .. versionchanged:: 2.0 - Added the ``atomic`` parameter. 
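-
-    A minimal usage sketch (illustrative only; the ``cat`` command and its
-    argument name are not part of Click):
-
-    .. code-block:: python
-
-        import click
-
-        @click.command()
-        @click.argument("src", type=click.File("rb"))
-        def cat(src):
-            click.echo(src.read(), nl=False)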
- """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool | None = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: str | os.PathLike[str] | t.IO[t.Any], - param: Parameter | None, - ctx: Context | None, - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("str | os.PathLike[str]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast("t.IO[t.Any]", lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]: - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. - :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. 
- :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: type[t.Any] | None = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: str | os.PathLike[str] - ) -> str | bytes | os.PathLike[str]: - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: str | os.PathLike[str], - param: Parameter | None, - ctx: Context | None, - ) -> str | bytes | os.PathLike[str]: - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - rv = os.path.realpath(rv) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} {filename!r} is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: Context, param: Parameter, incomplete: str - ) -> list[CompletionItem]: - """Return a special completion marker that 
tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None: - self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: Parameter | None, ctx: Context | None - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple( - ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False) - ) - - -def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. 
From a user's
-#: perspective this appears to just be the same as `STRING` but
-#: internally no string conversion takes place if the input was bytes.
-#: This is usually useful when working with file paths as they can
-#: appear in bytes and unicode.
-#:
-#: For path related uses the :class:`Path` type is a better choice but
-#: there are situations where an unprocessed type is useful which is why
-#: it is provided.
-#:
-#: .. versionadded:: 4.0
-UNPROCESSED = UnprocessedParamType()
-
-#: A unicode string parameter type which is the implicit default. This
-#: can also be selected by using ``str`` as type.
-STRING = StringParamType()
-
-#: An integer parameter. This can also be selected by using ``int`` as
-#: type.
-INT = IntParamType()
-
-#: A floating point value parameter. This can also be selected by using
-#: ``float`` as type.
-FLOAT = FloatParamType()
-
-#: A boolean parameter. This is the default for boolean flags. This can
-#: also be selected by using ``bool`` as a type.
-BOOL = BoolParamType()
-
-#: A UUID parameter.
-UUID = UUIDParameterType()
-
-
-class OptionHelpExtra(t.TypedDict, total=False):
-    envvars: tuple[str, ...]
-    default: str
-    range: str
-    required: str
diff --git a/venv/lib/python3.12/site-packages/click/utils.py b/venv/lib/python3.12/site-packages/click/utils.py
deleted file mode 100644
index ab2fe588..00000000
--- a/venv/lib/python3.12/site-packages/click/utils.py
+++ /dev/null
@@ -1,627 +0,0 @@
-from __future__ import annotations
-
-import collections.abc as cabc
-import os
-import re
-import sys
-import typing as t
-from functools import update_wrapper
-from types import ModuleType
-from types import TracebackType
-
-from ._compat import _default_text_stderr
-from ._compat import _default_text_stdout
-from ._compat import _find_binary_writer
-from ._compat import auto_wrap_for_ansi
-from ._compat import binary_streams
-from ._compat import open_stream
-from ._compat import should_strip_ansi
-from ._compat import strip_ansi
-from ._compat import text_streams
-from ._compat import WIN
-from .globals import resolve_color_default
-
-if t.TYPE_CHECKING:
-    import typing_extensions as te
-
-    P = te.ParamSpec("P")
-
-R = t.TypeVar("R")
-
-
-def _posixify(name: str) -> str:
-    return "-".join(name.split()).lower()
-
-
-def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]:
-    """Wraps a function so that it swallows exceptions."""
-
-    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
-        try:
-            return func(*args, **kwargs)
-        except Exception:
-            pass
-        return None
-
-    return update_wrapper(wrapper, func)
-
-
-def make_str(value: t.Any) -> str:
-    """Converts a value into a valid string."""
-    if isinstance(value, bytes):
-        try:
-            return value.decode(sys.getfilesystemencoding())
-        except UnicodeError:
-            return value.decode("utf-8", "replace")
-    return str(value)
-
-
-def make_default_short_help(help: str, max_length: int = 45) -> str:
-    """Returns a condensed version of help string."""
-    # Consider only the first paragraph.
-    paragraph_end = help.find("\n\n")
-
-    if paragraph_end != -1:
-        help = help[:paragraph_end]
-
-    # Collapse newlines, tabs, and spaces.
-    words = help.split()
-
-    if not words:
-        return ""
-
-    # The first paragraph started with a "no rewrap" marker, ignore it.
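-    # (Illustrative, beyond the original comments: with the default
-    # max_length=45, make_default_short_help("Run the server. More details
-    # follow.") returns "Run the server.", because truncation stops at the
-    # first sentence end.)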
-    if words[0] == "\b":
-        words = words[1:]
-
-    total_length = 0
-    last_index = len(words) - 1
-
-    for i, word in enumerate(words):
-        total_length += len(word) + (i > 0)
-
-        if total_length > max_length:  # too long, truncate
-            break
-
-        if word[-1] == ".":  # sentence end, truncate without "..."
-            return " ".join(words[: i + 1])
-
-        if total_length == max_length and i != last_index:
-            break  # not at sentence end, truncate with "..."
-    else:
-        return " ".join(words)  # no truncation needed
-
-    # Account for the length of the suffix.
-    total_length += len("...")
-
-    # remove words until the length is short enough
-    while i > 0:
-        total_length -= len(words[i]) + (i > 0)
-
-        if total_length <= max_length:
-            break
-
-        i -= 1
-
-    return " ".join(words[:i]) + "..."
-
-
-class LazyFile:
-    """A lazy file works like a regular file but it does not fully open
-    the file but it does perform some basic checks early to see if the
-    filename parameter does make sense. This is useful for safely opening
-    files for writing.
-    """
-
-    def __init__(
-        self,
-        filename: str | os.PathLike[str],
-        mode: str = "r",
-        encoding: str | None = None,
-        errors: str | None = "strict",
-        atomic: bool = False,
-    ):
-        self.name: str = os.fspath(filename)
-        self.mode = mode
-        self.encoding = encoding
-        self.errors = errors
-        self.atomic = atomic
-        self._f: t.IO[t.Any] | None
-        self.should_close: bool
-
-        if self.name == "-":
-            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
-        else:
-            if "r" in mode:
-                # Open and close the file in case we're opening it for
-                # reading so that we can catch at least some errors in
-                # some cases early.
-                open(filename, mode).close()
-            self._f = None
-            self.should_close = True
-
-    def __getattr__(self, name: str) -> t.Any:
-        return getattr(self.open(), name)
-
-    def __repr__(self) -> str:
-        if self._f is not None:
-            return repr(self._f)
-        return f"<unopened file {self.name!r} {self.mode}>"
-
-    def open(self) -> t.IO[t.Any]:
-        """Opens the file if it's not yet open. This call might fail with
-        a :exc:`FileError`. Not handling this error will produce an error
-        that Click shows.
-        """
-        if self._f is not None:
-            return self._f
-        try:
-            rv, self.should_close = open_stream(
-                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
-            )
-        except OSError as e:
-            from .exceptions import FileError
-
-            raise FileError(self.name, hint=e.strerror) from e
-        self._f = rv
-        return rv
-
-    def close(self) -> None:
-        """Closes the underlying file, no matter what."""
-        if self._f is not None:
-            self._f.close()
-
-    def close_intelligently(self) -> None:
-        """This function only closes the file if it was opened by the lazy
-        file wrapper. For instance this will never close stdin.
- """ - if self.should_close: - self.close() - - def __enter__(self) -> LazyFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.close_intelligently() - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> KeepOpenFile: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> cabc.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Any | None = None, - file: t.IO[t.Any] | None = None, - nl: bool = True, - err: bool = False, - color: bool | None = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. - :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: str | bytes | None = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. 
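-    # (An illustrative addition to the original comment: echo(b"\xff\x00")
-    # therefore bypasses the text stream and writes to the underlying
-    # binary buffer.)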
- if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file, color) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: t.Literal["stdin", "stdout", "stderr"], - encoding: str | None = None, - errors: str | None = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str | os.PathLike[str], - mode: str = "r", - encoding: str | None = None, - errors: str | None = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name or Path of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast("t.IO[t.Any]", KeepOpenFile(f)) - - return f - - -def format_filename( - filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], - shorten: bool = False, -) -> str: - """Format a filename as a string for display. 
Ensures the filename can be
-    displayed by replacing any invalid bytes or surrogate escapes in the name
-    with the replacement character ``�``.
-
-    Invalid bytes or surrogate escapes will raise an error when written to a
-    stream with ``errors="strict"``. This will typically happen with ``stdout``
-    when the locale is something like ``en_GB.UTF-8``.
-
-    Many scenarios *are* safe to write surrogates though, due to PEP 538 and
-    PEP 540, including:
-
-    -   Writing to ``stderr``, which uses ``errors="backslashreplace"``.
-    -   The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
-        stdout and stderr with ``errors="surrogateescape"``.
-    -   None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
-    -   Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
-        Python opens stdout and stderr with ``errors="surrogateescape"``.
-
-    :param filename: formats a filename for UI display. This will also convert
-        the filename into unicode without failing.
-    :param shorten: this optionally shortens the filename to strip off the
-        path that leads up to it.
-    """
-    if shorten:
-        filename = os.path.basename(filename)
-    else:
-        filename = os.fspath(filename)
-
-    if isinstance(filename, bytes):
-        filename = filename.decode(sys.getfilesystemencoding(), "replace")
-    else:
-        filename = filename.encode("utf-8", "surrogateescape").decode(
-            "utf-8", "replace"
-        )
-
-    return filename
-
-
-def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
-    r"""Returns the config folder for the application. The default behavior
-    is to return whatever is most appropriate for the operating system.
-
-    To give you an idea, for an app called ``"Foo Bar"``, something like
-    the following folders could be returned:
-
-    Mac OS X:
-      ``~/Library/Application Support/Foo Bar``
-    Mac OS X (POSIX):
-      ``~/.foo-bar``
-    Unix:
-      ``~/.config/foo-bar``
-    Unix (POSIX):
-      ``~/.foo-bar``
-    Windows (roaming):
-      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
-    Windows (not roaming):
-      ``C:\Users\<user>\AppData\Local\Foo Bar``
-
-    .. versionadded:: 2.0
-
-    :param app_name: the application name. This should be properly capitalized
-        and can contain whitespace.
-    :param roaming: controls if the folder should be roaming or not on Windows.
-        Has no effect otherwise.
-    :param force_posix: if this is set to `True` then on any POSIX system the
-        folder will be stored in the home folder with a leading
-        dot instead of the XDG config home or darwin's
-        application support folder.
-    """
-    if WIN:
-        key = "APPDATA" if roaming else "LOCALAPPDATA"
-        folder = os.environ.get(key)
-        if folder is None:
-            folder = os.path.expanduser("~")
-        return os.path.join(folder, app_name)
-    if force_posix:
-        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
-    if sys.platform == "darwin":
-        return os.path.join(
-            os.path.expanduser("~/Library/Application Support"), app_name
-        )
-    return os.path.join(
-        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
-        _posixify(app_name),
-    )
-
-
-class PacifyFlushWrapper:
-    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
-    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
-    of the Python interpreter. Notably ``.flush()`` is always called on
-    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
-    other cleanup code, and the case where the underlying file is not a broken
-    pipe, all calls and attributes are proxied.
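-
-    A plausible sketch of how it is applied (the exact call site is an
-    assumption, not taken from this file):
-
-    .. code-block:: python
-
-        import sys
-
-        sys.stdout = PacifyFlushWrapper(sys.stdout)
-        sys.stderr = PacifyFlushWrapper(sys.stderr)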
- """ - - def __init__(self, wrapped: t.IO[t.Any]) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: str | None = None, _main: ModuleType | None = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - # It is set to "" inside a Shiv or PEX zipapp. - if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: cabc.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> list[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. 
versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/venv/lib/python3.12/site-packages/dns/__init__.py b/venv/lib/python3.12/site-packages/dns/__init__.py deleted file mode 100644 index a4249b9e..00000000 --- a/venv/lib/python3.12/site-packages/dns/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""dnspython DNS toolkit""" - -__all__ = [ - "asyncbackend", - "asyncquery", - "asyncresolver", - "dnssec", - "dnssecalgs", - "dnssectypes", - "e164", - "edns", - "entropy", - "exception", - "flags", - "immutable", - "inet", - "ipv4", - "ipv6", - "message", - "name", - "namedict", - "node", - "opcode", - "query", - "quic", - "rcode", - "rdata", - "rdataclass", - "rdataset", - "rdatatype", - "renderer", - "resolver", - "reversename", - "rrset", - "serial", - "set", - "tokenizer", - "transaction", - "tsig", - "tsigkeyring", - "ttl", - "rdtypes", - "update", - "version", - "versioned", - "wire", - "xfr", - "zone", - "zonetypes", - "zonefile", -] - -from dns.version import version as __version__ # noqa diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c1fe1d17..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc deleted file mode 100644 index f565ee6c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncbackend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc deleted file mode 100644 index a234db42..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_asyncio_backend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc deleted file mode 100644 index 09d503cb..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_ddr.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc deleted file mode 100644 index 20995a7b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_features.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc deleted file mode 100644 index e8de86a8..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_immutable_ctx.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc deleted file mode 100644 index 07acd088..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/_trio_backend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc deleted file mode 100644 index 34f87b6b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncbackend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc deleted file mode 100644 index 1908a438..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncquery.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc deleted file mode 100644 index d4f1cf61..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/asyncresolver.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc deleted file mode 100644 index 742d96c2..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssec.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc deleted file mode 100644 index 5182b8fa..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/dnssectypes.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc deleted file mode 100644 index 8e66b3c2..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/e164.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc deleted file mode 100644 index 35ed343f..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/edns.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc deleted file mode 100644 index 5a75a062..00000000 Binary files 
a/venv/lib/python3.12/site-packages/dns/__pycache__/entropy.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc deleted file mode 100644 index 5c6e4e88..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/enum.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc deleted file mode 100644 index a39b6c74..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/exception.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc deleted file mode 100644 index 4abe180f..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/flags.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc deleted file mode 100644 index 6f066ea6..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/grange.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc deleted file mode 100644 index 49b29085..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/immutable.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc deleted file mode 100644 index 792bec9e..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/inet.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc deleted file mode 100644 index 5e9c76af..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/ipv4.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/ipv6.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/ipv6.cpython-312.pyc deleted file mode 100644 index d2b3a263..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/ipv6.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/message.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/message.cpython-312.pyc deleted file mode 100644 index ecfdf56c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/message.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/name.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/name.cpython-312.pyc deleted file mode 100644 index 0b8c1e66..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/name.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/namedict.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/namedict.cpython-312.pyc deleted file mode 100644 index 8526d1b9..00000000 Binary files 
a/venv/lib/python3.12/site-packages/dns/__pycache__/namedict.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/nameserver.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/nameserver.cpython-312.pyc deleted file mode 100644 index fcf26989..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/nameserver.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/node.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/node.cpython-312.pyc deleted file mode 100644 index c272f4fa..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/node.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/opcode.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/opcode.cpython-312.pyc deleted file mode 100644 index f1101f60..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/opcode.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/query.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/query.cpython-312.pyc deleted file mode 100644 index 832b2a83..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/query.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rcode.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rcode.cpython-312.pyc deleted file mode 100644 index b60cceb6..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rcode.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rdata.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rdata.cpython-312.pyc deleted file mode 100644 index 75a893fd..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rdata.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rdataclass.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rdataclass.cpython-312.pyc deleted file mode 100644 index db424959..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rdataclass.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rdataset.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rdataset.cpython-312.pyc deleted file mode 100644 index b693be8b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rdataset.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rdatatype.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rdatatype.cpython-312.pyc deleted file mode 100644 index 7ca2a813..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rdatatype.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/renderer.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/renderer.cpython-312.pyc deleted file mode 100644 index 361b91b5..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/renderer.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/resolver.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/resolver.cpython-312.pyc deleted file mode 100644 index bd93082a..00000000 Binary 
files a/venv/lib/python3.12/site-packages/dns/__pycache__/resolver.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/reversename.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/reversename.cpython-312.pyc deleted file mode 100644 index 899a1569..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/reversename.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/rrset.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/rrset.cpython-312.pyc deleted file mode 100644 index 5d021751..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/rrset.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/serial.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/serial.cpython-312.pyc deleted file mode 100644 index 4c6d6a7e..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/serial.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/set.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/set.cpython-312.pyc deleted file mode 100644 index c472d54c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/set.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/tokenizer.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/tokenizer.cpython-312.pyc deleted file mode 100644 index 243d70fb..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/tokenizer.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/transaction.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/transaction.cpython-312.pyc deleted file mode 100644 index 83cac42b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/transaction.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/tsig.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/tsig.cpython-312.pyc deleted file mode 100644 index f911181d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/tsig.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/tsigkeyring.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/tsigkeyring.cpython-312.pyc deleted file mode 100644 index cbe1f2ad..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/tsigkeyring.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/ttl.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/ttl.cpython-312.pyc deleted file mode 100644 index 344c18f8..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/ttl.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/update.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/update.cpython-312.pyc deleted file mode 100644 index 75dce02f..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/update.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 223d4cbb..00000000 Binary 
files a/venv/lib/python3.12/site-packages/dns/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/versioned.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/versioned.cpython-312.pyc deleted file mode 100644 index f7bc8a7d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/versioned.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/win32util.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/win32util.cpython-312.pyc deleted file mode 100644 index a265f273..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/win32util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/wire.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/wire.cpython-312.pyc deleted file mode 100644 index 76f25fb3..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/wire.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/xfr.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/xfr.cpython-312.pyc deleted file mode 100644 index 9ff9675b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/xfr.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/zone.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/zone.cpython-312.pyc deleted file mode 100644 index 3b0942e5..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/zone.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/zonefile.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/zonefile.cpython-312.pyc deleted file mode 100644 index 69774528..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/zonefile.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/__pycache__/zonetypes.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/__pycache__/zonetypes.cpython-312.pyc deleted file mode 100644 index 288306eb..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/__pycache__/zonetypes.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/_asyncbackend.py b/venv/lib/python3.12/site-packages/dns/_asyncbackend.py deleted file mode 100644 index f6760fd0..00000000 --- a/venv/lib/python3.12/site-packages/dns/_asyncbackend.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# This is a nullcontext for both sync and async. 3.7 has a nullcontext, -# but it is only for sync use. - - -class NullContext: - def __init__(self, enter_result=None): - self.enter_result = enter_result - - def __enter__(self): - return self.enter_result - - def __exit__(self, exc_type, exc_value, traceback): - pass - - async def __aenter__(self): - return self.enter_result - - async def __aexit__(self, exc_type, exc_value, traceback): - pass - - -# These are declared here so backends can import them without creating -# circular dependencies with dns.asyncbackend. 
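-#
-# For example (an illustrative note, not an original comment), NullContext
-# works in both sync and async code, so shared code paths need not branch:
-#
-#     with NullContext(sock) as s: ...        # sync
-#     async with NullContext(sock) as s: ...  # async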
- - -class Socket: # pragma: no cover - def __init__(self, family: int, type: int): - self.family = family - self.type = type - - async def close(self): - pass - - async def getpeername(self): - raise NotImplementedError - - async def getsockname(self): - raise NotImplementedError - - async def getpeercert(self, timeout): - raise NotImplementedError - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.close() - - -class DatagramSocket(Socket): # pragma: no cover - async def sendto(self, what, destination, timeout): - raise NotImplementedError - - async def recvfrom(self, size, timeout): - raise NotImplementedError - - -class StreamSocket(Socket): # pragma: no cover - async def sendall(self, what, timeout): - raise NotImplementedError - - async def recv(self, size, timeout): - raise NotImplementedError - - -class NullTransport: - async def connect_tcp(self, host, port, timeout, local_address): - raise NotImplementedError - - -class Backend: # pragma: no cover - def name(self): - return "unknown" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - raise NotImplementedError - - def datagram_connection_required(self): - return False - - async def sleep(self, interval): - raise NotImplementedError - - def get_transport_class(self): - raise NotImplementedError - - async def wait_for(self, awaitable, timeout): - raise NotImplementedError diff --git a/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py b/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py deleted file mode 100644 index 6ab168de..00000000 --- a/venv/lib/python3.12/site-packages/dns/_asyncio_backend.py +++ /dev/null @@ -1,275 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""asyncio library query support""" - -import asyncio -import socket -import sys - -import dns._asyncbackend -import dns._features -import dns.exception -import dns.inet - -_is_win32 = sys.platform == "win32" - - -def _get_running_loop(): - try: - return asyncio.get_running_loop() - except AttributeError: # pragma: no cover - return asyncio.get_event_loop() - - -class _DatagramProtocol: - def __init__(self): - self.transport = None - self.recvfrom = None - - def connection_made(self, transport): - self.transport = transport - - def datagram_received(self, data, addr): - if self.recvfrom and not self.recvfrom.done(): - self.recvfrom.set_result((data, addr)) - - def error_received(self, exc): # pragma: no cover - if self.recvfrom and not self.recvfrom.done(): - self.recvfrom.set_exception(exc) - - def connection_lost(self, exc): - if self.recvfrom and not self.recvfrom.done(): - if exc is None: - # EOF we triggered. Is there a better way to do this? 
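-                # (Illustrative note, an assumption about the intent: raising
-                # and catching first gives the EOFError a __traceback__ before
-                # it is stored on the future via set_exception().)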
- try: - raise EOFError("EOF") - except EOFError as e: - self.recvfrom.set_exception(e) - else: - self.recvfrom.set_exception(exc) - - def close(self): - self.transport.close() - - -async def _maybe_wait_for(awaitable, timeout): - if timeout is not None: - try: - return await asyncio.wait_for(awaitable, timeout) - except asyncio.TimeoutError: - raise dns.exception.Timeout(timeout=timeout) - else: - return await awaitable - - -class DatagramSocket(dns._asyncbackend.DatagramSocket): - def __init__(self, family, transport, protocol): - super().__init__(family, socket.SOCK_DGRAM) - self.transport = transport - self.protocol = protocol - - async def sendto(self, what, destination, timeout): # pragma: no cover - # no timeout for asyncio sendto - self.transport.sendto(what, destination) - return len(what) - - async def recvfrom(self, size, timeout): - # ignore size as there's no way I know to tell protocol about it - done = _get_running_loop().create_future() - try: - assert self.protocol.recvfrom is None - self.protocol.recvfrom = done - await _maybe_wait_for(done, timeout) - return done.result() - finally: - self.protocol.recvfrom = None - - async def close(self): - self.protocol.close() - - async def getpeername(self): - return self.transport.get_extra_info("peername") - - async def getsockname(self): - return self.transport.get_extra_info("sockname") - - async def getpeercert(self, timeout): - raise NotImplementedError - - -class StreamSocket(dns._asyncbackend.StreamSocket): - def __init__(self, af, reader, writer): - super().__init__(af, socket.SOCK_STREAM) - self.reader = reader - self.writer = writer - - async def sendall(self, what, timeout): - self.writer.write(what) - return await _maybe_wait_for(self.writer.drain(), timeout) - - async def recv(self, size, timeout): - return await _maybe_wait_for(self.reader.read(size), timeout) - - async def close(self): - self.writer.close() - - async def getpeername(self): - return self.writer.get_extra_info("peername") - - async def getsockname(self): - return self.writer.get_extra_info("sockname") - - async def getpeercert(self, timeout): - return self.writer.get_extra_info("peercert") - - -if dns._features.have("doh"): - import anyio - import httpcore - import httpcore._backends.anyio - import httpx - - _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend - _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream - - from dns.query import _compute_times, _expiration_for_this_attempt, _remaining - - class _NetworkBackend(_CoreAsyncNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - if local_port != 0: - raise NotImplementedError( - "the asyncio transport for HTTPX cannot set the local port" - ) - - async def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = await self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - try: - attempt_expiration = 
_expiration_for_this_attempt(2.0, expiration) - timeout = _remaining(attempt_expiration) - with anyio.fail_after(timeout): - stream = await anyio.connect_tcp( - remote_host=address, - remote_port=port, - local_host=local_address, - ) - return _CoreAnyIOStream(stream) - except Exception: - pass - raise httpcore.ConnectError - - async def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - async def sleep(self, seconds): # pylint: disable=signature-differs - await anyio.sleep(seconds) - - class _HTTPTransport(httpx.AsyncHTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None and bootstrap_address is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.asyncresolver - - resolver = dns.asyncresolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -else: - _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore - - -class Backend(dns._asyncbackend.Backend): - def name(self): - return "asyncio" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - loop = _get_running_loop() - if socktype == socket.SOCK_DGRAM: - if _is_win32 and source is None: - # Win32 wants explicit binding before recvfrom(). This is the - # proper fix for [#637]. - source = (dns.inet.any_for_af(af), 0) - transport, protocol = await loop.create_datagram_endpoint( - _DatagramProtocol, - source, - family=af, - proto=proto, - remote_addr=destination, - ) - return DatagramSocket(af, transport, protocol) - elif socktype == socket.SOCK_STREAM: - if destination is None: - # This shouldn't happen, but we check to make code analysis software - # happier. - raise ValueError("destination required for stream sockets") - (r, w) = await _maybe_wait_for( - asyncio.open_connection( - destination[0], - destination[1], - ssl=ssl_context, - family=af, - proto=proto, - local_addr=source, - server_hostname=server_hostname, - ), - timeout, - ) - return StreamSocket(af, r, w) - raise NotImplementedError( - "unsupported socket " + f"type {socktype}" - ) # pragma: no cover - - async def sleep(self, interval): - await asyncio.sleep(interval) - - def datagram_connection_required(self): - return False - - def get_transport_class(self): - return _HTTPTransport - - async def wait_for(self, awaitable, timeout): - return await _maybe_wait_for(awaitable, timeout) diff --git a/venv/lib/python3.12/site-packages/dns/_ddr.py b/venv/lib/python3.12/site-packages/dns/_ddr.py deleted file mode 100644 index bf5c11eb..00000000 --- a/venv/lib/python3.12/site-packages/dns/_ddr.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license -# -# Support for Discovery of Designated Resolvers - -import socket -import time -from urllib.parse import urlparse - -import dns.asyncbackend -import dns.inet -import dns.name -import dns.nameserver -import dns.query -import dns.rdtypes.svcbbase - -# The special name of the local resolver when using DDR -_local_resolver_name = dns.name.from_text("_dns.resolver.arpa") - - -# -# Processing is split up into I/O independent and I/O dependent parts to -# make supporting sync and async versions easy. 
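# A minimal sketch (illustrative, not from the deleted sources) of reaching the
# asyncio Backend above through dns.asyncbackend.get_backend(). 8.8.8.8 is just
# an example of a reachable public resolver, and the 2.0s timeouts are arbitrary.
import asyncio
import socket

import dns.asyncbackend
import dns.message


async def raw_udp_query():
    backend = dns.asyncbackend.get_backend("asyncio")
    q = dns.message.make_query("example.com.", "A")
    # make_socket() returns the DatagramSocket wrapper defined above.
    sock = await backend.make_socket(socket.AF_INET, socket.SOCK_DGRAM)
    async with sock:
        await sock.sendto(q.to_wire(), ("8.8.8.8", 53), 2.0)
        (wire, _) = await sock.recvfrom(65535, 2.0)
    return dns.message.from_wire(wire)


print(asyncio.run(raw_udp_query()))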
-# - - -class _SVCBInfo: - def __init__(self, bootstrap_address, port, hostname, nameservers): - self.bootstrap_address = bootstrap_address - self.port = port - self.hostname = hostname - self.nameservers = nameservers - - def ddr_check_certificate(self, cert): - """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)""" - for name, value in cert["subjectAltName"]: - if name == "IP Address" and value == self.bootstrap_address: - return True - return False - - def make_tls_context(self): - ssl = dns.query.ssl - ctx = ssl.create_default_context() - ctx.minimum_version = ssl.TLSVersion.TLSv1_2 - return ctx - - def ddr_tls_check_sync(self, lifetime): - ctx = self.make_tls_context() - expiration = time.time() + lifetime - with socket.create_connection( - (self.bootstrap_address, self.port), lifetime - ) as s: - with ctx.wrap_socket(s, server_hostname=self.hostname) as ts: - ts.settimeout(dns.query._remaining(expiration)) - ts.do_handshake() - cert = ts.getpeercert() - return self.ddr_check_certificate(cert) - - async def ddr_tls_check_async(self, lifetime, backend=None): - if backend is None: - backend = dns.asyncbackend.get_default_backend() - ctx = self.make_tls_context() - expiration = time.time() + lifetime - async with await backend.make_socket( - dns.inet.af_for_address(self.bootstrap_address), - socket.SOCK_STREAM, - 0, - None, - (self.bootstrap_address, self.port), - lifetime, - ctx, - self.hostname, - ) as ts: - cert = await ts.getpeercert(dns.query._remaining(expiration)) - return self.ddr_check_certificate(cert) - - -def _extract_nameservers_from_svcb(answer): - bootstrap_address = answer.nameserver - if not dns.inet.is_address(bootstrap_address): - return [] - infos = [] - for rr in answer.rrset.processing_order(): - nameservers = [] - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN) - if param is None: - continue - alpns = set(param.ids) - host = rr.target.to_text(omit_final_dot=True) - port = None - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT) - if param is not None: - port = param.port - # For now we ignore address hints and address resolution and always use the - # bootstrap address - if b"h2" in alpns: - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH) - if param is None or not param.value.endswith(b"{?dns}"): - continue - path = param.value[:-6].decode() - if not path.startswith("/"): - path = "/" + path - if port is None: - port = 443 - url = f"https://{host}:{port}{path}" - # check the URL - try: - urlparse(url) - nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address)) - except Exception: - # continue processing other ALPN types - pass - if b"dot" in alpns: - if port is None: - port = 853 - nameservers.append( - dns.nameserver.DoTNameserver(bootstrap_address, port, host) - ) - if b"doq" in alpns: - if port is None: - port = 853 - nameservers.append( - dns.nameserver.DoQNameserver(bootstrap_address, port, True, host) - ) - if len(nameservers) > 0: - infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers)) - return infos - - -def _get_nameservers_sync(answer, lifetime): - """Return a list of TLS-validated resolver nameservers extracted from an SVCB - answer.""" - nameservers = [] - infos = _extract_nameservers_from_svcb(answer) - for info in infos: - try: - if info.ddr_tls_check_sync(lifetime): - nameservers.extend(info.nameservers) - except Exception: - pass - return nameservers - - -async def _get_nameservers_async(answer, lifetime): - """Return a list of TLS-validated resolver nameservers 
extracted from an SVCB - answer.""" - nameservers = [] - infos = _extract_nameservers_from_svcb(answer) - for info in infos: - try: - if await info.ddr_tls_check_async(lifetime): - nameservers.extend(info.nameservers) - except Exception: - pass - return nameservers diff --git a/venv/lib/python3.12/site-packages/dns/_features.py b/venv/lib/python3.12/site-packages/dns/_features.py deleted file mode 100644 index fa6d4955..00000000 --- a/venv/lib/python3.12/site-packages/dns/_features.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import importlib.metadata -import itertools -import string -from typing import Dict, List, Tuple - - -def _tuple_from_text(version: str) -> Tuple: - text_parts = version.split(".") - int_parts = [] - for text_part in text_parts: - digit_prefix = "".join( - itertools.takewhile(lambda x: x in string.digits, text_part) - ) - try: - int_parts.append(int(digit_prefix)) - except Exception: - break - return tuple(int_parts) - - -def _version_check( - requirement: str, -) -> bool: - """Is the requirement fulfilled? - - The requirement must be of the form - - package>=version - """ - package, minimum = requirement.split(">=") - try: - version = importlib.metadata.version(package) - # This shouldn't happen, but it apparently can. - if version is None: - return False - except Exception: - return False - t_version = _tuple_from_text(version) - t_minimum = _tuple_from_text(minimum) - if t_version < t_minimum: - return False - return True - - -_cache: Dict[str, bool] = {} - - -def have(feature: str) -> bool: - """Is *feature* available? - - This tests if all optional packages needed for the - feature are available and recent enough. - - Returns ``True`` if the feature is available, - and ``False`` if it is not or if metadata is - missing. - """ - value = _cache.get(feature) - if value is not None: - return value - requirements = _requirements.get(feature) - if requirements is None: - # we make a cache entry here for consistency not performance - _cache[feature] = False - return False - ok = True - for requirement in requirements: - if not _version_check(requirement): - ok = False - break - _cache[feature] = ok - return ok - - -def force(feature: str, enabled: bool) -> None: - """Force the status of *feature* to be *enabled*. - - This method is provided as a workaround for any cases - where importlib.metadata is ineffective, or for testing. - """ - _cache[feature] = enabled - - -_requirements: Dict[str, List[str]] = { - ### BEGIN generated requirements - "dnssec": ["cryptography>=43"], - "doh": ["httpcore>=1.0.0", "httpx>=0.26.0", "h2>=4.1.0"], - "doq": ["aioquic>=1.0.0"], - "idna": ["idna>=3.7"], - "trio": ["trio>=0.23"], - "wmi": ["wmi>=1.5.1"], - ### END generated requirements -} diff --git a/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py b/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py deleted file mode 100644 index ae7a33bf..00000000 --- a/venv/lib/python3.12/site-packages/dns/_immutable_ctx.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# This implementation of the immutable decorator requires python >= -# 3.7, and is significantly more storage efficient when making classes -# with slots immutable. It's also faster. 
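# A short illustrative sketch of how the dns._features helpers above gate
# optional dependencies (the module is private, so treat this as a sketch):
import dns._features

if dns._features.have("doh"):
    print("DNS-over-HTTPS is available (httpx/httpcore/h2 are new enough)")
else:
    print("DNS-over-HTTPS support is unavailable")

# force() overrides detection, e.g. to exercise fallback paths in tests:
dns._features.force("doh", False)
assert not dns._features.have("doh")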
- -import contextvars -import inspect - -_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False) - - -class _Immutable: - """Immutable mixin class""" - - # We set slots to the empty list to say "we don't have any attributes". - # We do this so that if we're mixed in with a class with __slots__, we - # don't cause a __dict__ to be added which would waste space. - - __slots__ = () - - def __setattr__(self, name, value): - if _in__init__.get() is not self: - raise TypeError("object doesn't support attribute assignment") - else: - super().__setattr__(name, value) - - def __delattr__(self, name): - if _in__init__.get() is not self: - raise TypeError("object doesn't support attribute assignment") - else: - super().__delattr__(name) - - -def _immutable_init(f): - def nf(*args, **kwargs): - previous = _in__init__.set(args[0]) - try: - # call the actual __init__ - f(*args, **kwargs) - finally: - _in__init__.reset(previous) - - nf.__signature__ = inspect.signature(f) - return nf - - -def immutable(cls): - if _Immutable in cls.__mro__: - # Some ancestor already has the mixin, so just make sure we keep - # following the __init__ protocol. - cls.__init__ = _immutable_init(cls.__init__) - if hasattr(cls, "__setstate__"): - cls.__setstate__ = _immutable_init(cls.__setstate__) - ncls = cls - else: - # Mixin the Immutable class and follow the __init__ protocol. - class ncls(_Immutable, cls): - # We have to do the __slots__ declaration here too! - __slots__ = () - - @_immutable_init - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - if hasattr(cls, "__setstate__"): - - @_immutable_init - def __setstate__(self, *args, **kwargs): - super().__setstate__(*args, **kwargs) - - # make ncls have the same name and module as cls - ncls.__name__ = cls.__name__ - ncls.__qualname__ = cls.__qualname__ - ncls.__module__ = cls.__module__ - return ncls diff --git a/venv/lib/python3.12/site-packages/dns/_trio_backend.py b/venv/lib/python3.12/site-packages/dns/_trio_backend.py deleted file mode 100644 index 0ed904dd..00000000 --- a/venv/lib/python3.12/site-packages/dns/_trio_backend.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""trio async I/O library query support""" - -import socket - -import trio -import trio.socket # type: ignore - -import dns._asyncbackend -import dns._features -import dns.exception -import dns.inet - -if not dns._features.have("trio"): - raise ImportError("trio not found or too old") - - -def _maybe_timeout(timeout): - if timeout is not None: - return trio.move_on_after(timeout) - else: - return dns._asyncbackend.NullContext() - - -# for brevity -_lltuple = dns.inet.low_level_address_tuple - -# pylint: disable=redefined-outer-name - - -class DatagramSocket(dns._asyncbackend.DatagramSocket): - def __init__(self, sock): - super().__init__(sock.family, socket.SOCK_DGRAM) - self.socket = sock - - async def sendto(self, what, destination, timeout): - with _maybe_timeout(timeout): - if destination is None: - return await self.socket.send(what) - else: - return await self.socket.sendto(what, destination) - raise dns.exception.Timeout( - timeout=timeout - ) # pragma: no cover lgtm[py/unreachable-statement] - - async def recvfrom(self, size, timeout): - with _maybe_timeout(timeout): - return await self.socket.recvfrom(size) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def close(self): - self.socket.close() - - async def getpeername(self): - return 
self.socket.getpeername() - - async def getsockname(self): - return self.socket.getsockname() - - async def getpeercert(self, timeout): - raise NotImplementedError - - -class StreamSocket(dns._asyncbackend.StreamSocket): - def __init__(self, family, stream, tls=False): - super().__init__(family, socket.SOCK_STREAM) - self.stream = stream - self.tls = tls - - async def sendall(self, what, timeout): - with _maybe_timeout(timeout): - return await self.stream.send_all(what) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def recv(self, size, timeout): - with _maybe_timeout(timeout): - return await self.stream.receive_some(size) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def close(self): - await self.stream.aclose() - - async def getpeername(self): - if self.tls: - return self.stream.transport_stream.socket.getpeername() - else: - return self.stream.socket.getpeername() - - async def getsockname(self): - if self.tls: - return self.stream.transport_stream.socket.getsockname() - else: - return self.stream.socket.getsockname() - - async def getpeercert(self, timeout): - if self.tls: - with _maybe_timeout(timeout): - await self.stream.do_handshake() - return self.stream.getpeercert() - else: - raise NotImplementedError - - -if dns._features.have("doh"): - import httpcore - import httpcore._backends.trio - import httpx - - _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend - _CoreTrioStream = httpcore._backends.trio.TrioStream - - from dns.query import _compute_times, _expiration_for_this_attempt, _remaining - - class _NetworkBackend(_CoreAsyncNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - - async def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = await self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - try: - af = dns.inet.af_for_address(address) - if local_address is not None or self._local_port != 0: - source = (local_address, self._local_port) - else: - source = None - destination = (address, port) - attempt_expiration = _expiration_for_this_attempt(2.0, expiration) - timeout = _remaining(attempt_expiration) - sock = await Backend().make_socket( - af, socket.SOCK_STREAM, 0, source, destination, timeout - ) - return _CoreTrioStream(sock.stream) - except Exception: - continue - raise httpcore.ConnectError - - async def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - async def sleep(self, seconds): # pylint: disable=signature-differs - await trio.sleep(seconds) - - class _HTTPTransport(httpx.AsyncHTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None and bootstrap_address is None: - # pylint: 
disable=import-outside-toplevel,redefined-outer-name - import dns.asyncresolver - - resolver = dns.asyncresolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -else: - _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore - - -class Backend(dns._asyncbackend.Backend): - def name(self): - return "trio" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - s = trio.socket.socket(af, socktype, proto) - stream = None - try: - if source: - await s.bind(_lltuple(source, af)) - if socktype == socket.SOCK_STREAM or destination is not None: - connected = False - with _maybe_timeout(timeout): - await s.connect(_lltuple(destination, af)) - connected = True - if not connected: - raise dns.exception.Timeout( - timeout=timeout - ) # lgtm[py/unreachable-statement] - except Exception: # pragma: no cover - s.close() - raise - if socktype == socket.SOCK_DGRAM: - return DatagramSocket(s) - elif socktype == socket.SOCK_STREAM: - stream = trio.SocketStream(s) - tls = False - if ssl_context: - tls = True - try: - stream = trio.SSLStream( - stream, ssl_context, server_hostname=server_hostname - ) - except Exception: # pragma: no cover - await stream.aclose() - raise - return StreamSocket(af, stream, tls) - raise NotImplementedError( - "unsupported socket " + f"type {socktype}" - ) # pragma: no cover - - async def sleep(self, interval): - await trio.sleep(interval) - - def get_transport_class(self): - return _HTTPTransport - - async def wait_for(self, awaitable, timeout): - with _maybe_timeout(timeout): - return await awaitable - raise dns.exception.Timeout( - timeout=timeout - ) # pragma: no cover lgtm[py/unreachable-statement] diff --git a/venv/lib/python3.12/site-packages/dns/asyncbackend.py b/venv/lib/python3.12/site-packages/dns/asyncbackend.py deleted file mode 100644 index 0ec58b06..00000000 --- a/venv/lib/python3.12/site-packages/dns/asyncbackend.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -from typing import Dict - -import dns.exception - -# pylint: disable=unused-import -from dns._asyncbackend import ( # noqa: F401 lgtm[py/unused-import] - Backend, - DatagramSocket, - Socket, - StreamSocket, -) - -# pylint: enable=unused-import - -_default_backend = None - -_backends: Dict[str, Backend] = {} - -# Allow sniffio import to be disabled for testing purposes -_no_sniffio = False - - -class AsyncLibraryNotFoundError(dns.exception.DNSException): - pass - - -def get_backend(name: str) -> Backend: - """Get the specified asynchronous backend. - - *name*, a ``str``, the name of the backend. Currently the "trio" - and "asyncio" backends are available. - - Raises NotImplementedError if an unknown backend name is specified. - """ - # pylint: disable=import-outside-toplevel,redefined-outer-name - backend = _backends.get(name) - if backend: - return backend - if name == "trio": - import dns._trio_backend - - backend = dns._trio_backend.Backend() - elif name == "asyncio": - import dns._asyncio_backend - - backend = dns._asyncio_backend.Backend() - else: - raise NotImplementedError(f"unimplemented async backend {name}") - _backends[name] = backend - return backend - - -def sniff() -> str: - """Attempt to determine the in-use asynchronous I/O library by using - the ``sniffio`` module if it is available. 
- - Returns the name of the library, or raises AsyncLibraryNotFoundError - if the library cannot be determined. - """ - # pylint: disable=import-outside-toplevel - try: - if _no_sniffio: - raise ImportError - import sniffio - - try: - return sniffio.current_async_library() - except sniffio.AsyncLibraryNotFoundError: - raise AsyncLibraryNotFoundError("sniffio cannot determine async library") - except ImportError: - import asyncio - - try: - asyncio.get_running_loop() - return "asyncio" - except RuntimeError: - raise AsyncLibraryNotFoundError("no async library detected") - - -def get_default_backend() -> Backend: - """Get the default backend, initializing it if necessary.""" - if _default_backend: - return _default_backend - - return set_default_backend(sniff()) - - -def set_default_backend(name: str) -> Backend: - """Set the default backend. - - It's not normally necessary to call this method, as - ``get_default_backend()`` will initialize the backend - appropriately in many cases. If ``sniffio`` is not installed, or - in testing situations, this function allows the backend to be set - explicitly. - """ - global _default_backend - _default_backend = get_backend(name) - return _default_backend diff --git a/venv/lib/python3.12/site-packages/dns/asyncquery.py b/venv/lib/python3.12/site-packages/dns/asyncquery.py deleted file mode 100644 index efad0fd7..00000000 --- a/venv/lib/python3.12/site-packages/dns/asyncquery.py +++ /dev/null @@ -1,913 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
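# A minimal sketch of the backend-selection helpers from the dns.asyncbackend
# module above; everything here is illustrative.
import asyncio

import dns.asyncbackend


async def main():
    # Uses sniffio when installed, otherwise falls back to asyncio detection.
    backend = dns.asyncbackend.get_default_backend()
    print(backend.name())  # "asyncio" when run under asyncio.run()


asyncio.run(main())

# The choice can also be pinned explicitly, e.g. for tests:
dns.asyncbackend.set_default_backend("asyncio")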
- -"""Talk to a DNS server.""" - -import base64 -import contextlib -import random -import socket -import struct -import time -import urllib.parse -from typing import Any, Dict, Optional, Tuple, Union, cast - -import dns.asyncbackend -import dns.exception -import dns.inet -import dns.message -import dns.name -import dns.quic -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.transaction -from dns._asyncbackend import NullContext -from dns.query import ( - BadResponse, - HTTPVersion, - NoDOH, - NoDOQ, - UDPMode, - _check_status, - _compute_times, - _make_dot_ssl_context, - _matches_destination, - _remaining, - have_doh, - ssl, -) - -if have_doh: - import httpx - -# for brevity -_lltuple = dns.inet.low_level_address_tuple - - -def _source_tuple(af, address, port): - # Make a high level source tuple, or return None if address and port - # are both None - if address or port: - if address is None: - if af == socket.AF_INET: - address = "0.0.0.0" - elif af == socket.AF_INET6: - address = "::" - else: - raise NotImplementedError(f"unknown address family {af}") - return (address, port) - else: - return None - - -def _timeout(expiration, now=None): - if expiration is not None: - if not now: - now = time.time() - return max(expiration - now, 0) - else: - return None - - -async def send_udp( - sock: dns.asyncbackend.DatagramSocket, - what: Union[dns.message.Message, bytes], - destination: Any, - expiration: Optional[float] = None, -) -> Tuple[int, float]: - """Send a DNS message to the specified UDP socket. - - *sock*, a ``dns.asyncbackend.DatagramSocket``. - - *what*, a ``bytes`` or ``dns.message.Message``, the message to send. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where to send the query. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. The expiration value is meaningless for the asyncio backend, as - asyncio's transport sendto() never blocks. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - what = what.to_wire() - sent_time = time.time() - n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) - return (n, sent_time) - - -async def receive_udp( - sock: dns.asyncbackend.DatagramSocket, - destination: Optional[Any] = None, - expiration: Optional[float] = None, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - ignore_errors: bool = False, - query: Optional[dns.message.Message] = None, -) -> Any: - """Read a DNS message from a UDP socket. - - *sock*, a ``dns.asyncbackend.DatagramSocket``. - - See :py:func:`dns.query.receive_udp()` for the documentation of the other - parameters, and exceptions. - - Returns a ``(dns.message.Message, float, tuple)`` tuple of the received message, the - received time, and the address where the message arrived from. 
- """ - - wire = b"" - while True: - (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) - if not _matches_destination( - sock.family, from_address, destination, ignore_unexpected - ): - continue - received_time = time.time() - try: - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - raise_on_truncation=raise_on_truncation, - ) - except dns.message.Truncated as e: - # See the comment in query.py for details. - if ( - ignore_errors - and query is not None - and not query.is_response(e.message()) - ): - continue - else: - raise - except Exception: - if ignore_errors: - continue - else: - raise - if ignore_errors and query is not None and not query.is_response(r): - continue - return (r, received_time, from_address) - - -async def udp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - sock: Optional[dns.asyncbackend.DatagramSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ignore_errors: bool = False, -) -> dns.message.Message: - """Return the response obtained after sending a query via UDP. - - *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, - the socket to use for the query. If ``None``, the default, a - socket is created. Note that if a socket is provided, the - *source*, *source_port*, and *backend* are ignored. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.udp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - af = dns.inet.af_for_address(where) - destination = _lltuple((where, port), af) - if sock: - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - if not backend: - backend = dns.asyncbackend.get_default_backend() - stuple = _source_tuple(af, source, source_port) - if backend.datagram_connection_required(): - dtuple = (where, port) - else: - dtuple = None - cm = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, dtuple) - async with cm as s: - await send_udp(s, wire, destination, expiration) - (r, received_time, _) = await receive_udp( - s, - destination, - expiration, - ignore_unexpected, - one_rr_per_rrset, - q.keyring, - q.mac, - ignore_trailing, - raise_on_truncation, - ignore_errors, - q, - ) - r.time = received_time - begin_time - # We don't need to check q.is_response() if we are in ignore_errors mode - # as receive_udp() will have checked it. 
- if not (ignore_errors or q.is_response(r)): - raise BadResponse - return r - - -async def udp_with_fallback( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - udp_sock: Optional[dns.asyncbackend.DatagramSocket] = None, - tcp_sock: Optional[dns.asyncbackend.StreamSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ignore_errors: bool = False, -) -> Tuple[dns.message.Message, bool]: - """Return the response to the query, trying UDP first and falling back - to TCP if UDP results in a truncated response. - - *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, - the socket to use for the UDP query. If ``None``, the default, a - socket is created. Note that if a socket is provided the *source*, - *source_port*, and *backend* are ignored for the UDP query. - - *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the - socket to use for the TCP query. If ``None``, the default, a - socket is created. Note that if a socket is provided *where*, - *source*, *source_port*, and *backend* are ignored for the TCP query. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.udp_with_fallback()` for the documentation - of the other parameters, exceptions, and return type of this - method. - """ - try: - response = await udp( - q, - where, - timeout, - port, - source, - source_port, - ignore_unexpected, - one_rr_per_rrset, - ignore_trailing, - True, - udp_sock, - backend, - ignore_errors, - ) - return (response, False) - except dns.message.Truncated: - response = await tcp( - q, - where, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - tcp_sock, - backend, - ) - return (response, True) - - -async def send_tcp( - sock: dns.asyncbackend.StreamSocket, - what: Union[dns.message.Message, bytes], - expiration: Optional[float] = None, -) -> Tuple[int, float]: - """Send a DNS message to the specified TCP socket. - - *sock*, a ``dns.asyncbackend.StreamSocket``. - - See :py:func:`dns.query.send_tcp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - - if isinstance(what, dns.message.Message): - tcpmsg = what.to_wire(prepend_length=True) - else: - # copying the wire into tcpmsg is inefficient, but lets us - # avoid writev() or doing a short write that would get pushed - # onto the net - tcpmsg = len(what).to_bytes(2, "big") + what - sent_time = time.time() - await sock.sendall(tcpmsg, _timeout(expiration, sent_time)) - return (len(tcpmsg), sent_time) - - -async def _read_exactly(sock, count, expiration): - """Read the specified number of bytes from stream. Keep trying until we - either get the desired amount, or we hit EOF. - """ - s = b"" - while count > 0: - n = await sock.recv(count, _timeout(expiration)) - if n == b"": - raise EOFError("EOF") - count = count - len(n) - s = s + n - return s - - -async def receive_tcp( - sock: dns.asyncbackend.StreamSocket, - expiration: Optional[float] = None, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, -) -> Tuple[dns.message.Message, float]: - """Read a DNS message from a TCP socket. 
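# A usage sketch for the udp() and udp_with_fallback() coroutines above.
# 192.0.2.53 is a placeholder (TEST-NET) address; substitute a real resolver.
import asyncio

import dns.asyncquery
import dns.message


async def main():
    q = dns.message.make_query("example.com.", "A")
    r = await dns.asyncquery.udp(q, "192.0.2.53", timeout=2.0)
    print(r.rcode())
    # Or retry over TCP automatically when the UDP answer is truncated:
    (r, used_tcp) = await dns.asyncquery.udp_with_fallback(q, "192.0.2.53", timeout=2.0)
    print(used_tcp)


asyncio.run(main())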
- - *sock*, a ``dns.asyncbackend.StreamSocket``. - - See :py:func:`dns.query.receive_tcp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - - ldata = await _read_exactly(sock, 2, expiration) - (l,) = struct.unpack("!H", ldata) - wire = await _read_exactly(sock, l, expiration) - received_time = time.time() - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - return (r, received_time) - - -async def tcp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[dns.asyncbackend.StreamSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via TCP. - - *sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the - socket to use for the query. If ``None``, the default, a socket - is created. Note that if a socket is provided - *where*, *port*, *source*, *source_port*, and *backend* are ignored. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.tcp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - if sock: - # Verify that the socket is connected, as if it's not connected, - # it's not writable, and the polling in send_tcp() will time out or - # hang forever. - await sock.getpeername() - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - # These are simple (address, port) pairs, not family-dependent tuples - # you pass to low-level socket code. - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - if not backend: - backend = dns.asyncbackend.get_default_backend() - cm = await backend.make_socket( - af, socket.SOCK_STREAM, 0, stuple, dtuple, timeout - ) - async with cm as s: - await send_tcp(s, wire, expiration) - (r, received_time) = await receive_tcp( - s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - - -async def tls( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[dns.asyncbackend.StreamSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ssl_context: Optional[ssl.SSLContext] = None, - server_hostname: Optional[str] = None, - verify: Union[bool, str] = True, -) -> dns.message.Message: - """Return the response obtained after sending a query via TLS. - - *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket - to use for the query. If ``None``, the default, a socket is - created. Note that if a socket is provided, it must be a - connected SSL stream socket, and *where*, *port*, - *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* - are ignored. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend.
- - See :py:func:`dns.query.tls()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - if ssl_context is None: - ssl_context = _make_dot_ssl_context(server_hostname, verify) - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - if not backend: - backend = dns.asyncbackend.get_default_backend() - cm = await backend.make_socket( - af, - socket.SOCK_STREAM, - 0, - stuple, - dtuple, - timeout, - ssl_context, - server_hostname, - ) - async with cm as s: - timeout = _timeout(expiration) - response = await tcp( - q, - where, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - s, - backend, - ) - end_time = time.time() - response.time = end_time - begin_time - return response - - -def _maybe_get_resolver( - resolver: Optional["dns.asyncresolver.Resolver"], -) -> "dns.asyncresolver.Resolver": - # We need a separate method for this to avoid overriding the global - # variable "dns" with the as-yet undefined local variable "dns" - # in https(). - if resolver is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.asyncresolver - - resolver = dns.asyncresolver.Resolver() - return resolver - - -async def https( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 443, - source: Optional[str] = None, - source_port: int = 0, # pylint: disable=W0613 - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - client: Optional["httpx.AsyncClient"] = None, - path: str = "/dns-query", - post: bool = True, - verify: Union[bool, str] = True, - bootstrap_address: Optional[str] = None, - resolver: Optional["dns.asyncresolver.Resolver"] = None, - family: int = socket.AF_UNSPEC, - http_version: HTTPVersion = HTTPVersion.DEFAULT, -) -> dns.message.Message: - """Return the response obtained after sending a query via DNS-over-HTTPS. - - *client*, a ``httpx.AsyncClient``. If provided, the client to use for - the query. - - Unlike the other dnspython async functions, a backend cannot be provided - in this function because httpx always auto-detects the async backend. - - See :py:func:`dns.query.https()` for the documentation of the other - parameters, exceptions, and return type of this method. 
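# A usage sketch for the tls() and https() coroutines above. The Cloudflare
# endpoints are examples only; https() needs the optional "doh" feature
# (httpx/httpcore/h2) described earlier.
import asyncio

import dns.asyncquery
import dns.message


async def main():
    q = dns.message.make_query("example.com.", "AAAA")
    # DNS over TLS on port 853; server_hostname enables certificate checking.
    r = await dns.asyncquery.tls(q, "1.1.1.1", server_hostname="one.one.one.one")
    print(r.rcode())
    # DNS over HTTPS; *where* may be a URL instead of an address.
    r = await dns.asyncquery.https(q, "https://cloudflare-dns.com/dns-query")
    print(r.rcode())


asyncio.run(main())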
- """ - - try: - af = dns.inet.af_for_address(where) - except ValueError: - af = None - if af is not None and dns.inet.is_address(where): - if af == socket.AF_INET: - url = f"https://{where}:{port}{path}" - elif af == socket.AF_INET6: - url = f"https://[{where}]:{port}{path}" - else: - url = where - - extensions = {} - if bootstrap_address is None: - # pylint: disable=possibly-used-before-assignment - parsed = urllib.parse.urlparse(url) - if parsed.hostname is None: - raise ValueError("no hostname in URL") - if dns.inet.is_address(parsed.hostname): - bootstrap_address = parsed.hostname - extensions["sni_hostname"] = parsed.hostname - if parsed.port is not None: - port = parsed.port - - if http_version == HTTPVersion.H3 or ( - http_version == HTTPVersion.DEFAULT and not have_doh - ): - if bootstrap_address is None: - resolver = _maybe_get_resolver(resolver) - assert parsed.hostname is not None # for mypy - answers = await resolver.resolve_name(parsed.hostname, family) - bootstrap_address = random.choice(list(answers.addresses())) - return await _http3( - q, - bootstrap_address, - url, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - verify=verify, - post=post, - ) - - if not have_doh: - raise NoDOH # pragma: no cover - # pylint: disable=possibly-used-before-assignment - if client and not isinstance(client, httpx.AsyncClient): - raise ValueError("session parameter must be an httpx.AsyncClient") - # pylint: enable=possibly-used-before-assignment - - wire = q.to_wire() - headers = {"accept": "application/dns-message"} - - h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) - h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) - - backend = dns.asyncbackend.get_default_backend() - - if source is None: - local_address = None - local_port = 0 - else: - local_address = source - local_port = source_port - - if client: - cm: contextlib.AbstractAsyncContextManager = NullContext(client) - else: - transport = backend.get_transport_class()( - local_address=local_address, - http1=h1, - http2=h2, - verify=verify, - local_port=local_port, - bootstrap_address=bootstrap_address, - resolver=resolver, - family=family, - ) - - cm = httpx.AsyncClient(http1=h1, http2=h2, verify=verify, transport=transport) - - async with cm as the_client: - # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH - # GET and POST examples - if post: - headers.update( - { - "content-type": "application/dns-message", - "content-length": str(len(wire)), - } - ) - response = await backend.wait_for( - the_client.post( - url, - headers=headers, - content=wire, - extensions=extensions, - ), - timeout, - ) - else: - wire = base64.urlsafe_b64encode(wire).rstrip(b"=") - twire = wire.decode() # httpx does a repr() if we give it bytes - response = await backend.wait_for( - the_client.get( - url, - headers=headers, - params={"dns": twire}, - extensions=extensions, - ), - timeout, - ) - - # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH - # status codes - if response.status_code < 200 or response.status_code > 299: - raise ValueError( - f"{where} responded with status code {response.status_code}" - f"\nResponse body: {response.content!r}" - ) - r = dns.message.from_wire( - response.content, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = response.elapsed.total_seconds() - if not q.is_response(r): - raise BadResponse - return r - - -async def _http3( - q: dns.message.Message, 
- where: str, - url: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - verify: Union[bool, str] = True, - backend: Optional[dns.asyncbackend.Backend] = None, - hostname: Optional[str] = None, - post: bool = True, -) -> dns.message.Message: - if not dns.quic.have_quic: - raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover - - url_parts = urllib.parse.urlparse(url) - hostname = url_parts.hostname - if url_parts.port is not None: - port = url_parts.port - - q.id = 0 - wire = q.to_wire() - (cfactory, mfactory) = dns.quic.factories_for_backend(backend) - - async with cfactory() as context: - async with mfactory( - context, verify_mode=verify, server_name=hostname, h3=True - ) as the_manager: - the_connection = the_manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - stream = await the_connection.make_stream(timeout) - async with stream: - # note that send_h3() does not need await - stream.send_h3(url, wire, post) - wire = await stream.receive(_remaining(expiration)) - _check_status(stream.headers(), where, wire) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r - - -async def quic( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - connection: Optional[dns.quic.AsyncQuicConnection] = None, - verify: Union[bool, str] = True, - backend: Optional[dns.asyncbackend.Backend] = None, - hostname: Optional[str] = None, - server_hostname: Optional[str] = None, -) -> dns.message.Message: - """Return the response obtained after sending an asynchronous query via - DNS-over-QUIC. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.quic()` for the documentation of the other - parameters, exceptions, and return type of this method. 
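# A usage sketch for the quic() coroutine above. DNS-over-QUIC needs the
# optional "doq" feature (aioquic); the AdGuard address and hostname are
# examples only, not an endorsement of a particular server.
import asyncio

import dns.asyncquery
import dns.message


async def main():
    q = dns.message.make_query("example.com.", "A")
    r = await dns.asyncquery.quic(q, "94.140.14.14", server_hostname="dns.adguard-dns.com")
    print(r.rcode())


asyncio.run(main())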
- """ - - if not dns.quic.have_quic: - raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover - - if server_hostname is not None and hostname is None: - hostname = server_hostname - - q.id = 0 - wire = q.to_wire() - the_connection: dns.quic.AsyncQuicConnection - if connection: - cfactory = dns.quic.null_factory - mfactory = dns.quic.null_factory - the_connection = connection - else: - (cfactory, mfactory) = dns.quic.factories_for_backend(backend) - - async with cfactory() as context: - async with mfactory( - context, - verify_mode=verify, - server_name=server_hostname, - ) as the_manager: - if not connection: - the_connection = the_manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - stream = await the_connection.make_stream(timeout) - async with stream: - await stream.send(wire, True) - wire = await stream.receive(_remaining(expiration)) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r - - -async def _inbound_xfr( - txn_manager: dns.transaction.TransactionManager, - s: dns.asyncbackend.Socket, - query: dns.message.Message, - serial: Optional[int], - timeout: Optional[float], - expiration: float, -) -> Any: - """Given a socket, does the zone transfer.""" - rdtype = query.question[0].rdtype - is_ixfr = rdtype == dns.rdatatype.IXFR - origin = txn_manager.from_wire_origin() - wire = query.to_wire() - is_udp = s.type == socket.SOCK_DGRAM - if is_udp: - udp_sock = cast(dns.asyncbackend.DatagramSocket, s) - await udp_sock.sendto(wire, None, _timeout(expiration)) - else: - tcp_sock = cast(dns.asyncbackend.StreamSocket, s) - tcpmsg = struct.pack("!H", len(wire)) + wire - await tcp_sock.sendall(tcpmsg, expiration) - with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: - done = False - tsig_ctx = None - while not done: - (_, mexpiration) = _compute_times(timeout) - if mexpiration is None or ( - expiration is not None and mexpiration > expiration - ): - mexpiration = expiration - if is_udp: - timeout = _timeout(mexpiration) - (rwire, _) = await udp_sock.recvfrom(65535, timeout) - else: - ldata = await _read_exactly(tcp_sock, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - rwire = await _read_exactly(tcp_sock, l, mexpiration) - r = dns.message.from_wire( - rwire, - keyring=query.keyring, - request_mac=query.mac, - xfr=True, - origin=origin, - tsig_ctx=tsig_ctx, - multi=(not is_udp), - one_rr_per_rrset=is_ixfr, - ) - done = inbound.process_message(r) - yield r - tsig_ctx = r.tsig_ctx - if query.keyring and not r.had_tsig: - raise dns.exception.FormError("missing TSIG") - - -async def inbound_xfr( - where: str, - txn_manager: dns.transaction.TransactionManager, - query: Optional[dns.message.Message] = None, - port: int = 53, - timeout: Optional[float] = None, - lifetime: Optional[float] = None, - source: Optional[str] = None, - source_port: int = 0, - udp_mode: UDPMode = UDPMode.NEVER, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> None: - """Conduct an inbound transfer and apply it via a transaction from the - txn_manager. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. 
- - See :py:func:`dns.query.inbound_xfr()` for the documentation of - the other parameters, exceptions, and return type of this method. - """ - if query is None: - (query, serial) = dns.xfr.make_query(txn_manager) - else: - serial = dns.xfr.extract_serial_from_query(query) - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - if not backend: - backend = dns.asyncbackend.get_default_backend() - (_, expiration) = _compute_times(lifetime) - if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: - s = await backend.make_socket( - af, socket.SOCK_DGRAM, 0, stuple, dtuple, _timeout(expiration) - ) - async with s: - try: - async for _ in _inbound_xfr( - txn_manager, s, query, serial, timeout, expiration - ): - pass - return - except dns.xfr.UseTCP: - if udp_mode == UDPMode.ONLY: - raise - - s = await backend.make_socket( - af, socket.SOCK_STREAM, 0, stuple, dtuple, _timeout(expiration) - ) - async with s: - async for _ in _inbound_xfr(txn_manager, s, query, serial, timeout, expiration): - pass diff --git a/venv/lib/python3.12/site-packages/dns/asyncresolver.py b/venv/lib/python3.12/site-packages/dns/asyncresolver.py deleted file mode 100644 index 8f5e062a..00000000 --- a/venv/lib/python3.12/site-packages/dns/asyncresolver.py +++ /dev/null @@ -1,475 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Asynchronous DNS stub resolver.""" - -import socket -import time -from typing import Any, Dict, List, Optional, Union - -import dns._ddr -import dns.asyncbackend -import dns.asyncquery -import dns.exception -import dns.name -import dns.query -import dns.rdataclass -import dns.rdatatype -import dns.resolver # lgtm[py/import-and-import-from] - -# import some resolver symbols for brevity -from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute - -# for indentation purposes below -_udp = dns.asyncquery.udp -_tcp = dns.asyncquery.tcp - - -class Resolver(dns.resolver.BaseResolver): - """Asynchronous DNS stub resolver.""" - - async def resolve( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ) -> dns.resolver.Answer: - """Query nameservers asynchronously to find the answer to the question. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. 
- - See :py:func:`dns.resolver.Resolver.resolve()` for the - documentation of the other parameters, exceptions, and return - type of this method. - """ - - resolution = dns.resolver._Resolution( - self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search - ) - if not backend: - backend = dns.asyncbackend.get_default_backend() - start = time.time() - while True: - (request, answer) = resolution.next_request() - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - # cache hit! - return answer - assert request is not None # needed for type checking - done = False - while not done: - (nameserver, tcp, backoff) = resolution.next_nameserver() - if backoff: - await backend.sleep(backoff) - timeout = self._compute_timeout(start, lifetime, resolution.errors) - try: - response = await nameserver.async_query( - request, - timeout=timeout, - source=source, - source_port=source_port, - max_size=tcp, - backend=backend, - ) - except Exception as ex: - (_, done) = resolution.query_result(None, ex) - continue - (answer, done) = resolution.query_result(response, None) - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - return answer - - async def resolve_address( - self, ipaddr: str, *args: Any, **kwargs: Any - ) -> dns.resolver.Answer: - """Use an asynchronous resolver to run a reverse query for PTR - records. - - This utilizes the resolve() method to perform a PTR lookup on the - specified IP address. - - *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get - the PTR record for. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. - modified_kwargs: Dict[str, Any] = {} - modified_kwargs.update(kwargs) - modified_kwargs["rdtype"] = dns.rdatatype.PTR - modified_kwargs["rdclass"] = dns.rdataclass.IN - return await self.resolve( - dns.reversename.from_address(ipaddr), *args, **modified_kwargs - ) - - async def resolve_name( - self, - name: Union[dns.name.Name, str], - family: int = socket.AF_UNSPEC, - **kwargs: Any, - ) -> dns.resolver.HostAnswers: - """Use an asynchronous resolver to query for address records. - - This utilizes the resolve() method to perform A and/or AAAA lookups on - the specified name. - - *qname*, a ``dns.name.Name`` or ``str``, the name to resolve. - - *family*, an ``int``, the address family. If socket.AF_UNSPEC - (the default), both A and AAAA records will be retrieved. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. 
- modified_kwargs: Dict[str, Any] = {} - modified_kwargs.update(kwargs) - modified_kwargs.pop("rdtype", None) - modified_kwargs["rdclass"] = dns.rdataclass.IN - - if family == socket.AF_INET: - v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs) - return dns.resolver.HostAnswers.make(v4=v4) - elif family == socket.AF_INET6: - v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs) - return dns.resolver.HostAnswers.make(v6=v6) - elif family != socket.AF_UNSPEC: - raise NotImplementedError(f"unknown address family {family}") - - raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True) - lifetime = modified_kwargs.pop("lifetime", None) - start = time.time() - v6 = await self.resolve( - name, - dns.rdatatype.AAAA, - raise_on_no_answer=False, - lifetime=self._compute_timeout(start, lifetime), - **modified_kwargs, - ) - # Note that setting name ensures we query the same name - # for A as we did for AAAA. (This is just in case search lists - # are active by default in the resolver configuration and - # we might be talking to a server that says NXDOMAIN when it - # wants to say NOERROR no data.) - name = v6.qname - v4 = await self.resolve( - name, - dns.rdatatype.A, - raise_on_no_answer=False, - lifetime=self._compute_timeout(start, lifetime), - **modified_kwargs, - ) - answers = dns.resolver.HostAnswers.make( - v6=v6, v4=v4, add_empty=not raise_on_no_answer - ) - if not answers: - raise NoAnswer(response=v6.response) - return answers - - # pylint: disable=redefined-outer-name - - async def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: - """Determine the canonical name of *name*. - - The canonical name is the name the resolver uses for queries - after all CNAME and DNAME renamings have been applied. - - *name*, a ``dns.name.Name`` or ``str``, the query name. - - This method can raise any exception that ``resolve()`` can - raise, other than ``dns.resolver.NoAnswer`` and - ``dns.resolver.NXDOMAIN``. - - Returns a ``dns.name.Name``. - """ - try: - answer = await self.resolve(name, raise_on_no_answer=False) - canonical_name = answer.canonical_name - except dns.resolver.NXDOMAIN as e: - canonical_name = e.canonical_name - return canonical_name - - async def try_ddr(self, lifetime: float = 5.0) -> None: - """Try to update the resolver's nameservers using Discovery of Designated - Resolvers (DDR). If successful, the resolver will subsequently use - DNS-over-HTTPS or DNS-over-TLS for future queries. - - *lifetime*, a float, is the maximum time to spend attempting DDR. The default - is 5 seconds. - - If the SVCB query is successful and results in a non-empty list of nameservers, - then the resolver's nameservers are set to the returned servers in priority - order. - - The current implementation does not use any address hints from the SVCB record, - nor does it resolve addresses for the SVCB target name; rather, it assumes that - the bootstrap nameserver will always be one of the addresses and uses it. - A future revision to the code may offer fuller support. The code verifies that - the bootstrap nameserver is in the Subject Alternative Name field of the - TLS certificate.
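# A usage sketch for the asynchronous stub resolver whose methods are
# documented above; example.com and 8.8.8.8 are illustrative values.
import asyncio

import dns.asyncresolver


async def main():
    answer = await dns.asyncresolver.resolve("example.com", "MX")
    for rr in answer:
        print(rr.preference, rr.exchange)
    ptr = await dns.asyncresolver.resolve_address("8.8.8.8")
    print(ptr[0].target)
    # Opportunistically upgrade the default resolver to DoH/DoT via DDR:
    await dns.asyncresolver.try_ddr()


asyncio.run(main())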
- """ - try: - expiration = time.time() + lifetime - answer = await self.resolve( - dns._ddr._local_resolver_name, "svcb", lifetime=lifetime - ) - timeout = dns.query._remaining(expiration) - nameservers = await dns._ddr._get_nameservers_async(answer, timeout) - if len(nameservers) > 0: - self.nameservers = nameservers - except Exception: - pass - - -default_resolver = None - - -def get_default_resolver() -> Resolver: - """Get the default asynchronous resolver, initializing it if necessary.""" - if default_resolver is None: - reset_default_resolver() - assert default_resolver is not None - return default_resolver - - -def reset_default_resolver() -> None: - """Re-initialize default asynchronous resolver. - - Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX - systems) will be re-read immediately. - """ - - global default_resolver - default_resolver = Resolver() - - -async def resolve( - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.resolver.Answer: - """Query nameservers asynchronously to find the answer to the question. - - This is a convenience function that uses the default resolver - object to make the query. - - See :py:func:`dns.asyncresolver.Resolver.resolve` for more - information on the parameters. - """ - - return await get_default_resolver().resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - backend, - ) - - -async def resolve_address( - ipaddr: str, *args: Any, **kwargs: Any -) -> dns.resolver.Answer: - """Use a resolver to run a reverse query for PTR records. - - See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more - information on the parameters. - """ - - return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) - - -async def resolve_name( - name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any -) -> dns.resolver.HostAnswers: - """Use a resolver to asynchronously query for address records. - - See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more - information on the parameters. - """ - - return await get_default_resolver().resolve_name(name, family, **kwargs) - - -async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: - """Determine the canonical name of *name*. - - See :py:func:`dns.resolver.Resolver.canonical_name` for more - information on the parameters and possible exceptions. - """ - - return await get_default_resolver().canonical_name(name) - - -async def try_ddr(timeout: float = 5.0) -> None: - """Try to update the default resolver's nameservers using Discovery of Designated - Resolvers (DDR). If successful, the resolver will subsequently use - DNS-over-HTTPS or DNS-over-TLS for future queries. - - See :py:func:`dns.resolver.Resolver.try_ddr` for more information. 
- """ - return await get_default_resolver().try_ddr(timeout) - - -async def zone_for_name( - name: Union[dns.name.Name, str], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - tcp: bool = False, - resolver: Optional[Resolver] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.name.Name: - """Find the name of the zone which contains the specified name. - - See :py:func:`dns.resolver.Resolver.zone_for_name` for more - information on the parameters and possible exceptions. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, dns.name.root) - if resolver is None: - resolver = get_default_resolver() - if not name.is_absolute(): - raise NotAbsolute(name) - while True: - try: - answer = await resolver.resolve( - name, dns.rdatatype.SOA, rdclass, tcp, backend=backend - ) - assert answer.rrset is not None - if answer.rrset.name == name: - return name - # otherwise we were CNAMEd or DNAMEd and need to look higher - except (NXDOMAIN, NoAnswer): - pass - try: - name = name.parent() - except dns.name.NoParent: # pragma: no cover - raise NoRootSOA - - -async def make_resolver_at( - where: Union[dns.name.Name, str], - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Resolver: - """Make a stub resolver using the specified destination as the full resolver. - - *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the - full resolver. - - *port*, an ``int``, the port to use. If not specified, the default is 53. - - *family*, an ``int``, the address family to use. This parameter is used if - *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case - the first address returned by ``resolve_name()`` will be used, otherwise the - first address of the specified family will be used. - - *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use for - resolution of hostnames. If not specified, the default resolver will be used. - - Returns a ``dns.resolver.Resolver`` or raises an exception. - """ - if resolver is None: - resolver = get_default_resolver() - nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] - if isinstance(where, str) and dns.inet.is_address(where): - nameservers.append(dns.nameserver.Do53Nameserver(where, port)) - else: - answers = await resolver.resolve_name(where, family) - for address in answers.addresses(): - nameservers.append(dns.nameserver.Do53Nameserver(address, port)) - res = dns.asyncresolver.Resolver(configure=False) - res.nameservers = nameservers - return res - - -async def resolve_at( - where: Union[dns.name.Name, str], - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> dns.resolver.Answer: - """Query nameservers to find the answer to the question. - - This is a convenience function that calls ``dns.asyncresolver.make_resolver_at()`` - to make a resolver, and then uses it to resolve the query. 
- - See ``dns.asyncresolver.Resolver.resolve`` for more information on the resolution - parameters, and ``dns.asyncresolver.make_resolver_at`` for information about the - resolver parameters *where*, *port*, *family*, and *resolver*. - - If making more than one query, it is more efficient to call - ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the queries - instead of calling ``resolve_at()`` multiple times. - """ - res = await make_resolver_at(where, port, family, resolver) - return await res.resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - backend, - ) diff --git a/venv/lib/python3.12/site-packages/dns/dnssec.py b/venv/lib/python3.12/site-packages/dns/dnssec.py deleted file mode 100644 index b69d0a12..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssec.py +++ /dev/null @@ -1,1247 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNSSEC-related functions and constants.""" - - -import base64 -import contextlib -import functools -import hashlib -import struct -import time -from datetime import datetime -from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast - -import dns._features -import dns.exception -import dns.name -import dns.node -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rrset -import dns.transaction -import dns.zone -from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash -from dns.exception import ( # pylint: disable=W0611 - AlgorithmKeyMismatch, - DeniedByPolicy, - UnsupportedAlgorithm, - ValidationFailure, -) -from dns.rdtypes.ANY.CDNSKEY import CDNSKEY -from dns.rdtypes.ANY.CDS import CDS -from dns.rdtypes.ANY.DNSKEY import DNSKEY -from dns.rdtypes.ANY.DS import DS -from dns.rdtypes.ANY.NSEC import NSEC, Bitmap -from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM -from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime -from dns.rdtypes.dnskeybase import Flag - -PublicKey = Union[ - "GenericPublicKey", - "rsa.RSAPublicKey", - "ec.EllipticCurvePublicKey", - "ed25519.Ed25519PublicKey", - "ed448.Ed448PublicKey", -] - -PrivateKey = Union[ - "GenericPrivateKey", - "rsa.RSAPrivateKey", - "ec.EllipticCurvePrivateKey", - "ed25519.Ed25519PrivateKey", - "ed448.Ed448PrivateKey", -] - -RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None] - - -def algorithm_from_text(text: str) -> Algorithm: - """Convert text into a DNSSEC algorithm value. - - *text*, a ``str``, the text to convert into an algorithm value. - - Returns an ``int``.
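[Editor's note: for illustration, the text/value conversion helpers round-trip like this; ED25519 is DNSSEC algorithm number 15 per RFC 8080.]

import dns.dnssec

value = dns.dnssec.algorithm_from_text("ED25519")
print(int(value))                           # 15
print(dns.dnssec.algorithm_to_text(value))  # ED25519
print(dns.dnssec.algorithm_to_text(15))     # same name, from the raw number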
- """ - - return Algorithm.from_text(text) - - -def algorithm_to_text(value: Union[Algorithm, int]) -> str: - """Convert a DNSSEC algorithm value to text - - *value*, a ``dns.dnssec.Algorithm``. - - Returns a ``str``, the name of a DNSSEC algorithm. - """ - - return Algorithm.to_text(value) - - -def to_timestamp(value: Union[datetime, str, float, int]) -> int: - """Convert various format to a timestamp""" - if isinstance(value, datetime): - return int(value.timestamp()) - elif isinstance(value, str): - return sigtime_to_posixtime(value) - elif isinstance(value, float): - return int(value) - elif isinstance(value, int): - return value - else: - raise TypeError("Unsupported timestamp type") - - -def key_id(key: Union[DNSKEY, CDNSKEY]) -> int: - """Return the key id (a 16-bit number) for the specified key. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` - - Returns an ``int`` between 0 and 65535 - """ - - rdata = key.to_wire() - assert rdata is not None # for mypy - if key.algorithm == Algorithm.RSAMD5: - return (rdata[-3] << 8) + rdata[-2] - else: - total = 0 - for i in range(len(rdata) // 2): - total += (rdata[2 * i] << 8) + rdata[2 * i + 1] - if len(rdata) % 2 != 0: - total += rdata[len(rdata) - 1] << 8 - total += (total >> 16) & 0xFFFF - return total & 0xFFFF - - -class Policy: - def __init__(self): - pass - - def ok_to_sign(self, _: DNSKEY) -> bool: # pragma: no cover - return False - - def ok_to_validate(self, _: DNSKEY) -> bool: # pragma: no cover - return False - - def ok_to_create_ds(self, _: DSDigest) -> bool: # pragma: no cover - return False - - def ok_to_validate_ds(self, _: DSDigest) -> bool: # pragma: no cover - return False - - -class SimpleDeny(Policy): - def __init__(self, deny_sign, deny_validate, deny_create_ds, deny_validate_ds): - super().__init__() - self._deny_sign = deny_sign - self._deny_validate = deny_validate - self._deny_create_ds = deny_create_ds - self._deny_validate_ds = deny_validate_ds - - def ok_to_sign(self, key: DNSKEY) -> bool: - return key.algorithm not in self._deny_sign - - def ok_to_validate(self, key: DNSKEY) -> bool: - return key.algorithm not in self._deny_validate - - def ok_to_create_ds(self, algorithm: DSDigest) -> bool: - return algorithm not in self._deny_create_ds - - def ok_to_validate_ds(self, algorithm: DSDigest) -> bool: - return algorithm not in self._deny_validate_ds - - -rfc_8624_policy = SimpleDeny( - {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1, Algorithm.ECCGOST}, - {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1}, - {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST}, - {DSDigest.NULL}, -) - -allow_all_policy = SimpleDeny(set(), set(), set(), set()) - - -default_policy = rfc_8624_policy - - -def make_ds( - name: Union[dns.name.Name, str], - key: dns.rdata.Rdata, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, - policy: Optional[Policy] = None, - validating: bool = False, -) -> DS: - """Create a DS record for a DNSSEC key. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, - the key the DS is about. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, - then it will be made absolute using the specified origin. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. 
If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - *validating*, a ``bool``. If ``True``, then policy is checked in - validating mode, i.e. "Is it ok to validate using this digest algorithm?". - Otherwise the policy is checked in creating mode, i.e. "Is it ok to create a DS with - this digest algorithm?". - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. - - Raises ``DeniedByPolicy`` if the algorithm is denied by policy. - - Returns a ``dns.rdtypes.ANY.DS.DS`` - """ - - if policy is None: - policy = default_policy - try: - if isinstance(algorithm, str): - algorithm = DSDigest[algorithm.upper()] - except Exception: - raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') - if validating: - check = policy.ok_to_validate_ds - else: - check = policy.ok_to_create_ds - if not check(algorithm): - raise DeniedByPolicy - if not isinstance(key, (DNSKEY, CDNSKEY)): - raise ValueError("key is not a DNSKEY/CDNSKEY") - if algorithm == DSDigest.SHA1: - dshash = hashlib.sha1() - elif algorithm == DSDigest.SHA256: - dshash = hashlib.sha256() - elif algorithm == DSDigest.SHA384: - dshash = hashlib.sha384() - else: - raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') - - if isinstance(name, str): - name = dns.name.from_text(name, origin) - wire = name.canonicalize().to_wire() - kwire = key.to_wire(origin=origin) - assert wire is not None and kwire is not None # for mypy - dshash.update(wire) - dshash.update(kwire) - digest = dshash.digest() - - dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + digest - ds = dns.rdata.from_wire( - dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, len(dsrdata) - ) - return cast(DS, ds) - - -def make_cds( - name: Union[dns.name.Name, str], - key: dns.rdata.Rdata, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, -) -> CDS: - """Create a CDS record for a DNSSEC key. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, - the key the DS is about. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. 
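[Editor's note: a sketch of deriving a DS from a zone's key-signing key with make_ds() and key_id() as defined above; the zone name is a placeholder and the lookup needs network access.]

import dns.dnssec
import dns.resolver
from dns.rdtypes.dnskeybase import Flag

answer = dns.resolver.resolve("dnspython.org", "DNSKEY")
for dnskey in answer:
    if dnskey.flags & Flag.SEP:  # the SEP flag conventionally marks the KSK
        print("key tag:", dns.dnssec.key_id(dnskey))
        print(dns.dnssec.make_ds("dnspython.org", dnskey, "SHA256"))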
- - Returns a ``dns.rdtypes.ANY.DS.CDS`` - """ - - ds = make_ds(name, key, algorithm, origin) - return CDS( - rdclass=ds.rdclass, - rdtype=dns.rdatatype.CDS, - key_tag=ds.key_tag, - algorithm=ds.algorithm, - digest_type=ds.digest_type, - digest=ds.digest, - ) - - -def _find_candidate_keys( - keys: Dict[dns.name.Name, Union[dns.rdataset.Rdataset, dns.node.Node]], rrsig: RRSIG -) -> Optional[List[DNSKEY]]: - value = keys.get(rrsig.signer) - if isinstance(value, dns.node.Node): - rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) - else: - rdataset = value - if rdataset is None: - return None - return [ - cast(DNSKEY, rd) - for rd in rdataset - if rd.algorithm == rrsig.algorithm - and key_id(rd) == rrsig.key_tag - and (rd.flags & Flag.ZONE) == Flag.ZONE # RFC 4034 2.1.1 - and rd.protocol == 3 # RFC 4034 2.1.2 - ] - - -def _get_rrname_rdataset( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], -) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]: - if isinstance(rrset, tuple): - return rrset[0], rrset[1] - else: - return rrset.name, rrset - - -def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None: - # pylint: disable=possibly-used-before-assignment - public_cls = get_algorithm_cls_from_dnskey(key).public_cls - try: - public_key = public_cls.from_dnskey(key) - except ValueError: - raise ValidationFailure("invalid public key") - public_key.verify(sig, data) - - -def _validate_rrsig( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsig: RRSIG, - keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], - origin: Optional[dns.name.Name] = None, - now: Optional[float] = None, - policy: Optional[Policy] = None, -) -> None: - """Validate an RRset against a single signature rdata, throwing an - exception if validation is not successful. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. - - *keys*, the key dictionary, used to find the DNSKEY associated - with a given name. The dictionary is keyed by a - ``dns.name.Name``, and has ``dns.node.Node`` or - ``dns.rdataset.Rdataset`` values. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative - names. - - *now*, a ``float`` or ``None``, the time, in seconds since the epoch, to - use as the current time when validating. If ``None``, the actual current - time is used. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - Raises ``ValidationFailure`` if the signature is expired, not yet valid, - the public key is invalid, the algorithm is unknown, the verification - fails, etc. - - Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by - dnspython but not implemented. 
- """ - - if policy is None: - policy = default_policy - - candidate_keys = _find_candidate_keys(keys, rrsig) - if candidate_keys is None: - raise ValidationFailure("unknown key") - - if now is None: - now = time.time() - if rrsig.expiration < now: - raise ValidationFailure("expired") - if rrsig.inception > now: - raise ValidationFailure("not yet valid") - - data = _make_rrsig_signature_data(rrset, rrsig, origin) - - # pylint: disable=possibly-used-before-assignment - for candidate_key in candidate_keys: - if not policy.ok_to_validate(candidate_key): - continue - try: - _validate_signature(rrsig.signature, data, candidate_key) - return - except (InvalidSignature, ValidationFailure): - # this happens on an individual validation failure - continue - # nothing verified -- raise failure: - raise ValidationFailure("verify failure") - - -def _validate( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsigset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], - origin: Optional[dns.name.Name] = None, - now: Optional[float] = None, - policy: Optional[Policy] = None, -) -> None: - """Validate an RRset against a signature RRset, throwing an exception - if none of the signatures validate. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsigset*, the signature RRset. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *keys*, the key dictionary, used to find the DNSKEY associated - with a given name. The dictionary is keyed by a - ``dns.name.Name``, and has ``dns.node.Node`` or - ``dns.rdataset.Rdataset`` values. - - *origin*, a ``dns.name.Name``, the origin to use for relative names; - defaults to None. - - *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to - use as the current time when validating. If ``None``, the actual current - time is used. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - Raises ``ValidationFailure`` if the signature is expired, not yet valid, - the public key is invalid, the algorithm is unknown, the verification - fails, etc. 
- """ - - if policy is None: - policy = default_policy - - if isinstance(origin, str): - origin = dns.name.from_text(origin, dns.name.root) - - if isinstance(rrset, tuple): - rrname = rrset[0] - else: - rrname = rrset.name - - if isinstance(rrsigset, tuple): - rrsigname = rrsigset[0] - rrsigrdataset = rrsigset[1] - else: - rrsigname = rrsigset.name - rrsigrdataset = rrsigset - - rrname = rrname.choose_relativity(origin) - rrsigname = rrsigname.choose_relativity(origin) - if rrname != rrsigname: - raise ValidationFailure("owner names do not match") - - for rrsig in rrsigrdataset: - if not isinstance(rrsig, RRSIG): - raise ValidationFailure("expected an RRSIG") - try: - _validate_rrsig(rrset, rrsig, keys, origin, now, policy) - return - except (ValidationFailure, UnsupportedAlgorithm): - pass - raise ValidationFailure("no RRSIGs validated") - - -def _sign( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - private_key: PrivateKey, - signer: dns.name.Name, - dnskey: DNSKEY, - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - verify: bool = False, - policy: Optional[Policy] = None, - origin: Optional[dns.name.Name] = None, - deterministic: bool = True, -) -> RRSIG: - """Sign RRset using private key. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *private_key*, the private key to use for signing, a - ``cryptography.hazmat.primitives.asymmetric`` private key class applicable - for DNSSEC. - - *signer*, a ``dns.name.Name``, the Signer's name. - - *dnskey*, a ``DNSKEY`` matching ``private_key``. - - *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the - signature inception time. If ``None``, the current time is used. If a ``str``, the - format is "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX - epoch in text form; this is the same the RRSIG rdata's text form. - Values of type `int` or `float` are interpreted as seconds since the UNIX epoch. - - *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - expiration time. If ``None``, the expiration time will be the inception time plus - the value of the *lifetime* parameter. See the description of *inception* above - for how the various parameter types are interpreted. - - *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This - parameter is only meaningful if *expiration* is ``None``. - - *verify*, a ``bool``. If set to ``True``, the signer will verify signatures - after they are created; the default is ``False``. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - *origin*, a ``dns.name.Name`` or ``None``. If ``None``, the default, then all - names in the rrset (including its owner name) must be absolute; otherwise the - specified origin will be used to make names absolute when signing. - - *deterministic*, a ``bool``. If ``True``, the default, use deterministic - (reproducible) signatures when supported by the algorithm used for signing. - Currently, this only affects ECDSA. - - Raises ``DeniedByPolicy`` if the signature is denied by policy. 
- """ - - if policy is None: - policy = default_policy - if not policy.ok_to_sign(dnskey): - raise DeniedByPolicy - - if isinstance(rrset, tuple): - rdclass = rrset[1].rdclass - rdtype = rrset[1].rdtype - rrname = rrset[0] - original_ttl = rrset[1].ttl - else: - rdclass = rrset.rdclass - rdtype = rrset.rdtype - rrname = rrset.name - original_ttl = rrset.ttl - - if inception is not None: - rrsig_inception = to_timestamp(inception) - else: - rrsig_inception = int(time.time()) - - if expiration is not None: - rrsig_expiration = to_timestamp(expiration) - elif lifetime is not None: - rrsig_expiration = rrsig_inception + lifetime - else: - raise ValueError("expiration or lifetime must be specified") - - # Derelativize now because we need a correct labels length for the - # rrsig_template. - if origin is not None: - rrname = rrname.derelativize(origin) - labels = len(rrname) - 1 - - # Adjust labels appropriately for wildcards. - if rrname.is_wild(): - labels -= 1 - - rrsig_template = RRSIG( - rdclass=rdclass, - rdtype=dns.rdatatype.RRSIG, - type_covered=rdtype, - algorithm=dnskey.algorithm, - labels=labels, - original_ttl=original_ttl, - expiration=rrsig_expiration, - inception=rrsig_inception, - key_tag=key_id(dnskey), - signer=signer, - signature=b"", - ) - - data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template, origin) - - # pylint: disable=possibly-used-before-assignment - if isinstance(private_key, GenericPrivateKey): - signing_key = private_key - else: - try: - private_cls = get_algorithm_cls_from_dnskey(dnskey) - signing_key = private_cls(key=private_key) - except UnsupportedAlgorithm: - raise TypeError("Unsupported key algorithm") - - signature = signing_key.sign(data, verify, deterministic) - - return cast(RRSIG, rrsig_template.replace(signature=signature)) - - -def _make_rrsig_signature_data( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsig: RRSIG, - origin: Optional[dns.name.Name] = None, -) -> bytes: - """Create signature rdata. - - *rrset*, the RRset to sign/validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsig*, a ``dns.rdata.Rdata``, the signature to validate, or the - signature template used when signing. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative - names. - - Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by - dnspython but not implemented. - """ - - if isinstance(origin, str): - origin = dns.name.from_text(origin, dns.name.root) - - signer = rrsig.signer - if not signer.is_absolute(): - if origin is None: - raise ValidationFailure("relative RR name without an origin specified") - signer = signer.derelativize(origin) - - # For convenience, allow the rrset to be specified as a (name, - # rdataset) tuple as well as a proper rrset - rrname, rdataset = _get_rrname_rdataset(rrset) - - data = b"" - wire = rrsig.to_wire(origin=signer) - assert wire is not None # for mypy - data += wire[:18] - data += rrsig.signer.to_digestable(signer) - - # Derelativize the name before considering labels. 
- if not rrname.is_absolute(): - if origin is None: - raise ValidationFailure("relative RR name without an origin specified") - rrname = rrname.derelativize(origin) - - name_len = len(rrname) - if rrname.is_wild() and rrsig.labels != name_len - 2: - raise ValidationFailure("wild owner name has wrong label length") - if name_len - 1 < rrsig.labels: - raise ValidationFailure("owner name longer than RRSIG labels") - elif rrsig.labels < name_len - 1: - suffix = rrname.split(rrsig.labels + 1)[1] - rrname = dns.name.from_text("*", suffix) - rrnamebuf = rrname.to_digestable() - rrfixed = struct.pack("!HHI", rdataset.rdtype, rdataset.rdclass, rrsig.original_ttl) - rdatas = [rdata.to_digestable(origin) for rdata in rdataset] - for rdata in sorted(rdatas): - data += rrnamebuf - data += rrfixed - rrlen = struct.pack("!H", len(rdata)) - data += rrlen - data += rdata - - return data - - -def _make_dnskey( - public_key: PublicKey, - algorithm: Union[int, str], - flags: int = Flag.ZONE, - protocol: int = 3, -) -> DNSKEY: - """Convert a public key to DNSKEY Rdata - - *public_key*, a ``PublicKey`` (``GenericPublicKey`` or - ``cryptography.hazmat.primitives.asymmetric``) to convert. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - *flags*: DNSKEY flags field as an integer. - - *protocol*: DNSKEY protocol field as an integer. - - Raises ``ValueError`` if the specified key algorithm parameters are not - supported, ``TypeError`` if the key type is unsupported, - `UnsupportedAlgorithm` if the algorithm is unknown and - `AlgorithmKeyMismatch` if the algorithm does not match the key type. - - Return DNSKEY ``Rdata``. - """ - - algorithm = Algorithm.make(algorithm) - - # pylint: disable=possibly-used-before-assignment - if isinstance(public_key, GenericPublicKey): - return public_key.to_dnskey(flags=flags, protocol=protocol) - else: - public_cls = get_algorithm_cls(algorithm).public_cls - return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol) - - -def _make_cdnskey( - public_key: PublicKey, - algorithm: Union[int, str], - flags: int = Flag.ZONE, - protocol: int = 3, -) -> CDNSKEY: - """Convert a public key to CDNSKEY Rdata - - *public_key*, the public key to convert, a - ``cryptography.hazmat.primitives.asymmetric`` public key class applicable - for DNSSEC. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - *flags*: DNSKEY flags field as an integer. - - *protocol*: DNSKEY protocol field as an integer. - - Raises ``ValueError`` if the specified key algorithm parameters are not - supported, ``TypeError`` if the key type is unsupported, - `UnsupportedAlgorithm` if the algorithm is unknown and - `AlgorithmKeyMismatch` if the algorithm does not match the key type. - - Return CDNSKEY ``Rdata``. - """ - - dnskey = _make_dnskey(public_key, algorithm, flags, protocol) - - return CDNSKEY( - rdclass=dnskey.rdclass, - rdtype=dns.rdatatype.CDNSKEY, - flags=dnskey.flags, - protocol=dnskey.protocol, - algorithm=dnskey.algorithm, - key=dnskey.key, - ) - - -def nsec3_hash( - domain: Union[dns.name.Name, str], - salt: Optional[Union[str, bytes]], - iterations: int, - algorithm: Union[int, str], -) -> str: - """ - Calculate the NSEC3 hash, according to - https://tools.ietf.org/html/rfc5155#section-5 - - *domain*, a ``dns.name.Name`` or ``str``, the name to hash. - - *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a - string, it is decoded as a hex string. - - *iterations*, an ``int``, the number of iterations.
- - *algorithm*, a ``str`` or ``int``, the hash algorithm. - The only defined algorithm is SHA1. - - Returns a ``str``, the encoded NSEC3 hash. - """ - - b32_conversion = str.maketrans( - "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV" - ) - - try: - if isinstance(algorithm, str): - algorithm = NSEC3Hash[algorithm.upper()] - except Exception: - raise ValueError("Wrong hash algorithm (only SHA1 is supported)") - - if algorithm != NSEC3Hash.SHA1: - raise ValueError("Wrong hash algorithm (only SHA1 is supported)") - - if salt is None: - salt_encoded = b"" - elif isinstance(salt, str): - if len(salt) % 2 == 0: - salt_encoded = bytes.fromhex(salt) - else: - raise ValueError("Invalid salt length") - else: - salt_encoded = salt - - if not isinstance(domain, dns.name.Name): - domain = dns.name.from_text(domain) - domain_encoded = domain.canonicalize().to_wire() - assert domain_encoded is not None - - digest = hashlib.sha1(domain_encoded + salt_encoded).digest() - for _ in range(iterations): - digest = hashlib.sha1(digest + salt_encoded).digest() - - output = base64.b32encode(digest).decode("utf-8") - output = output.translate(b32_conversion) - - return output - - -def make_ds_rdataset( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - algorithms: Set[Union[DSDigest, str]], - origin: Optional[dns.name.Name] = None, -) -> dns.rdataset.Rdataset: - """Create a DS record from DNSKEY/CDNSKEY/CDS. - - *rrset*, the RRset to create DS Rdataset for. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *algorithms*, a set of ``str`` or ``int`` specifying the hash algorithms. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. If the RRset is a CDS, only digest - algorithms matching algorithms are accepted. - - *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if any of the algorithms are unknown and - ``ValueError`` if the given RRset is not usable. - - Returns a ``dns.rdataset.Rdataset`` - """ - - rrname, rdataset = _get_rrname_rdataset(rrset) - - if rdataset.rdtype not in ( - dns.rdatatype.DNSKEY, - dns.rdatatype.CDNSKEY, - dns.rdatatype.CDS, - ): - raise ValueError("rrset not a DNSKEY/CDNSKEY/CDS") - - _algorithms = set() - for algorithm in algorithms: - try: - if isinstance(algorithm, str): - algorithm = DSDigest[algorithm.upper()] - except Exception: - raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') - _algorithms.add(algorithm) - - if rdataset.rdtype == dns.rdatatype.CDS: - res = [] - for rdata in cds_rdataset_to_ds_rdataset(rdataset): - if rdata.digest_type in _algorithms: - res.append(rdata) - if len(res) == 0: - raise ValueError("no acceptable CDS rdata found") - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - res = [] - for algorithm in _algorithms: - res.extend(dnskey_rdataset_to_cds_rdataset(rrname, rdataset, algorithm, origin)) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def cds_rdataset_to_ds_rdataset( - rdataset: dns.rdataset.Rdataset, -) -> dns.rdataset.Rdataset: - """Create a DS rdataset from a CDS rdataset. - - *rdataset*, a ``dns.rdataset.Rdataset``, the CDS rdataset to convert. - - Raises ``ValueError`` if the rdataset is not CDS.
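[Editor's note: nsec3_hash() above can be checked against the RFC 5155 Appendix A test vector, which hashes the name "example" with salt aabbccdd and 12 additional iterations.]

import dns.dnssec

digest = dns.dnssec.nsec3_hash("example", salt="aabbccdd", iterations=12, algorithm="SHA1")
print(digest)  # expected per the RFC vector: 0P9MHAVEQVM6T7VBL5LOP2U3T2RP3TOM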
- - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype != dns.rdatatype.CDS: - raise ValueError("rdataset not a CDS") - res = [] - for rdata in rdataset: - res.append( - CDS( - rdclass=rdata.rdclass, - rdtype=dns.rdatatype.DS, - key_tag=rdata.key_tag, - algorithm=rdata.algorithm, - digest_type=rdata.digest_type, - digest=rdata.digest, - ) - ) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def dnskey_rdataset_to_cds_rdataset( - name: Union[dns.name.Name, str], - rdataset: dns.rdataset.Rdataset, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, -) -> dns.rdataset.Rdataset: - """Create a CDS record from DNSKEY/CDNSKEY. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the CDS record. - - *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown or - ``ValueError`` if the rdataset is not DNSKEY/CDNSKEY. - - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype not in (dns.rdatatype.DNSKEY, dns.rdatatype.CDNSKEY): - raise ValueError("rdataset not a DNSKEY/CDNSKEY") - res = [] - for rdata in rdataset: - res.append(make_cds(name, rdata, algorithm, origin)) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def dnskey_rdataset_to_cdnskey_rdataset( - rdataset: dns.rdataset.Rdataset, -) -> dns.rdataset.Rdataset: - """Create a CDNSKEY record from DNSKEY. - - *rdataset*, a ``dns.rdataset.Rdataset``, to create CDNSKEY Rdataset for. 
- - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype != dns.rdatatype.DNSKEY: - raise ValueError("rdataset not a DNSKEY") - res = [] - for rdata in rdataset: - res.append( - CDNSKEY( - rdclass=rdataset.rdclass, - rdtype=rdataset.rdtype, - flags=rdata.flags, - protocol=rdata.protocol, - algorithm=rdata.algorithm, - key=rdata.key, - ) - ) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def default_rrset_signer( - txn: dns.transaction.Transaction, - rrset: dns.rrset.RRset, - signer: dns.name.Name, - ksks: List[Tuple[PrivateKey, DNSKEY]], - zsks: List[Tuple[PrivateKey, DNSKEY]], - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - policy: Optional[Policy] = None, - origin: Optional[dns.name.Name] = None, - deterministic: bool = True, -) -> None: - """Default RRset signer""" - - if rrset.rdtype in set( - [ - dns.rdatatype.RdataType.DNSKEY, - dns.rdatatype.RdataType.CDS, - dns.rdatatype.RdataType.CDNSKEY, - ] - ): - keys = ksks - else: - keys = zsks - - for private_key, dnskey in keys: - rrsig = dns.dnssec.sign( - rrset=rrset, - private_key=private_key, - dnskey=dnskey, - inception=inception, - expiration=expiration, - lifetime=lifetime, - signer=signer, - policy=policy, - origin=origin, - deterministic=deterministic, - ) - txn.add(rrset.name, rrset.ttl, rrsig) - - -def sign_zone( - zone: dns.zone.Zone, - txn: Optional[dns.transaction.Transaction] = None, - keys: Optional[List[Tuple[PrivateKey, DNSKEY]]] = None, - add_dnskey: bool = True, - dnskey_ttl: Optional[int] = None, - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - nsec3: Optional[NSEC3PARAM] = None, - rrset_signer: Optional[RRsetSigner] = None, - policy: Optional[Policy] = None, - deterministic: bool = True, -) -> None: - """Sign zone. - - *zone*, a ``dns.zone.Zone``, the zone to sign. - - *txn*, a ``dns.transaction.Transaction``, an optional transaction to use for - signing. - - *keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK - roles are assigned automatically if the SEP flag is used, otherwise all RRsets are - signed by all keys. - - *add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are - automatically added to the zone on signing. - - *dnskey_ttl*, an ``int``, specifies the TTL for DNSKEY RRs. If not specified, the - TTL of the existing DNSKEY RRset is used, or failing that the TTL of the SOA RRset. - - *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - inception time. If ``None``, the current time is used. If a ``str``, the format is - "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text - form; this is the same as the RRSIG rdata's text form. Values of type `int` or `float` - are interpreted as seconds since the UNIX epoch. - - *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - expiration time. If ``None``, the expiration time will be the inception time plus - the value of the *lifetime* parameter. See the description of *inception* above for - how the various parameter types are interpreted. - - *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This - parameter is only meaningful if *expiration* is ``None``. - - *nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet - implemented.
- - *rrset_signer*, a ``Callable``, an optional function for signing RRsets. The - function requires two arguments: transaction and RRset. If not specified, - ``dns.dnssec.default_rrset_signer`` will be used. - - *deterministic*, a ``bool``. If ``True``, the default, use deterministic - (reproducible) signatures when supported by the algorithm used for signing. - Currently, this only affects ECDSA. - - Returns ``None``. - """ - - ksks = [] - zsks = [] - - # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all - # records with all keys - if keys: - for key in keys: - if key[1].flags & Flag.SEP: - ksks.append(key) - else: - zsks.append(key) - if not ksks: - ksks = keys - if not zsks: - zsks = keys - else: - keys = [] - - if txn: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn) - else: - cm = zone.writer() - - if zone.origin is None: - raise ValueError("no zone origin") - - with cm as _txn: - if add_dnskey: - if dnskey_ttl is None: - dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY) - if dnskey: - dnskey_ttl = dnskey.ttl - else: - soa = _txn.get(zone.origin, dns.rdatatype.SOA) - dnskey_ttl = soa.ttl - for _, dnskey in keys: - _txn.add(zone.origin, dnskey_ttl, dnskey) - - if nsec3: - raise NotImplementedError("Signing with NSEC3 not yet implemented") - else: - _rrset_signer = rrset_signer or functools.partial( - default_rrset_signer, - signer=zone.origin, - ksks=ksks, - zsks=zsks, - inception=inception, - expiration=expiration, - lifetime=lifetime, - policy=policy, - origin=zone.origin, - deterministic=deterministic, - ) - return _sign_zone_nsec(zone, _txn, _rrset_signer) - - -def _sign_zone_nsec( - zone: dns.zone.Zone, - txn: dns.transaction.Transaction, - rrset_signer: Optional[RRsetSigner] = None, -) -> None: - """NSEC zone signer""" - - def _txn_add_nsec( - txn: dns.transaction.Transaction, - name: dns.name.Name, - next_secure: Optional[dns.name.Name], - rdclass: dns.rdataclass.RdataClass, - ttl: int, - rrset_signer: Optional[RRsetSigner] = None, - ) -> None: - """NSEC zone signer helper""" - mandatory_types = set( - [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC] - ) - node = txn.get_node(name) - if node and next_secure: - types = ( - set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types - ) - windows = Bitmap.from_rdtypes(list(types)) - rrset = dns.rrset.from_rdata( - name, - ttl, - NSEC( - rdclass=rdclass, - rdtype=dns.rdatatype.RdataType.NSEC, - next=next_secure, - windows=windows, - ), - ) - txn.add(rrset) - if rrset_signer: - rrset_signer(txn, rrset) - - rrsig_ttl = zone.get_soa().minimum - delegation = None - last_secure = None - - for name in sorted(txn.iterate_names()): - if delegation and name.is_subdomain(delegation): - # names below delegations are not secure - continue - elif txn.get(name, dns.rdatatype.NS) and name != zone.origin: - # inside delegation - delegation = name - else: - # outside delegation - delegation = None - - if rrset_signer: - node = txn.get_node(name) - if node: - for rdataset in node.rdatasets: - if rdataset.rdtype == dns.rdatatype.RRSIG: - # do not sign RRSIGs - continue - elif delegation and rdataset.rdtype != dns.rdatatype.DS: - # do not sign delegations except DS records - continue - else: - rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset) - rrset_signer(txn, rrset) - - # We need "is not None" as the empty name is False because its length is 0.
- if last_secure is not None: - _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer) - last_secure = name - - if last_secure: - _txn_add_nsec( - txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer - ) - - -def _need_pyca(*args, **kwargs): - raise ImportError( - "DNSSEC validation requires python cryptography" - ) # pragma: no cover - - -if dns._features.have("dnssec"): - from cryptography.exceptions import InvalidSignature - from cryptography.hazmat.primitives.asymmetric import dsa # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ec # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ed448 # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import rsa # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ( # pylint: disable=W0611 - ed25519, - ) - - from dns.dnssecalgs import ( # pylint: disable=C0412 - get_algorithm_cls, - get_algorithm_cls_from_dnskey, - ) - from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey - - validate = _validate # type: ignore - validate_rrsig = _validate_rrsig # type: ignore - sign = _sign - make_dnskey = _make_dnskey - make_cdnskey = _make_cdnskey - _have_pyca = True -else: # pragma: no cover - validate = _need_pyca - validate_rrsig = _need_pyca - sign = _need_pyca - make_dnskey = _need_pyca - make_cdnskey = _need_pyca - _have_pyca = False - -### BEGIN generated Algorithm constants - -RSAMD5 = Algorithm.RSAMD5 -DH = Algorithm.DH -DSA = Algorithm.DSA -ECC = Algorithm.ECC -RSASHA1 = Algorithm.RSASHA1 -DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 -RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 -RSASHA256 = Algorithm.RSASHA256 -RSASHA512 = Algorithm.RSASHA512 -ECCGOST = Algorithm.ECCGOST -ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 -ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 -ED25519 = Algorithm.ED25519 -ED448 = Algorithm.ED448 -INDIRECT = Algorithm.INDIRECT -PRIVATEDNS = Algorithm.PRIVATEDNS -PRIVATEOID = Algorithm.PRIVATEOID - -### END generated Algorithm constants diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py deleted file mode 100644 index 602367e3..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__init__.py +++ /dev/null @@ -1,121 +0,0 @@ -from typing import Dict, Optional, Tuple, Type, Union - -import dns.name -from dns.dnssecalgs.base import GenericPrivateKey -from dns.dnssectypes import Algorithm -from dns.exception import UnsupportedAlgorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - -if dns._features.have("dnssec"): - from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1 - from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384 - from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519 - from dns.dnssecalgs.rsa import ( - PrivateRSAMD5, - PrivateRSASHA1, - PrivateRSASHA1NSEC3SHA1, - PrivateRSASHA256, - PrivateRSASHA512, - ) - - _have_cryptography = True -else: - _have_cryptography = False - -AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]] - -algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {} -if _have_cryptography: - # pylint: disable=possibly-used-before-assignment - algorithms.update( - { - (Algorithm.RSAMD5, None): PrivateRSAMD5, - (Algorithm.DSA, None): PrivateDSA, - (Algorithm.RSASHA1, None): PrivateRSASHA1, - (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1, - (Algorithm.RSASHA1NSEC3SHA1, None): 
PrivateRSASHA1NSEC3SHA1, - (Algorithm.RSASHA256, None): PrivateRSASHA256, - (Algorithm.RSASHA512, None): PrivateRSASHA512, - (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256, - (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384, - (Algorithm.ED25519, None): PrivateED25519, - (Algorithm.ED448, None): PrivateED448, - } - ) - - -def get_algorithm_cls( - algorithm: Union[int, str], prefix: AlgorithmPrefix = None
) -> Type[GenericPrivateKey]: - """Get Private Key class from Algorithm. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. - - Returns a ``dns.dnssecalgs.GenericPrivateKey`` - """ - algorithm = Algorithm.make(algorithm) - cls = algorithms.get((algorithm, prefix)) - if cls: - return cls - raise UnsupportedAlgorithm( - f'algorithm "{Algorithm.to_text(algorithm)}" not supported by dnspython' - ) - - -def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]: - """Get Private Key class from DNSKEY. - - *dnskey*, a ``DNSKEY`` to get Algorithm class for. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. - - Returns a ``dns.dnssecalgs.GenericPrivateKey`` - """ - prefix: AlgorithmPrefix = None - if dnskey.algorithm == Algorithm.PRIVATEDNS: - prefix, _ = dns.name.from_wire(dnskey.key, 0) - elif dnskey.algorithm == Algorithm.PRIVATEOID: - length = int(dnskey.key[0]) - prefix = dnskey.key[0 : length + 1] - return get_algorithm_cls(dnskey.algorithm, prefix) - - -def register_algorithm_cls( - algorithm: Union[int, str], - algorithm_cls: Type[GenericPrivateKey], - name: Optional[Union[dns.name.Name, str]] = None, - oid: Optional[bytes] = None, -) -> None: - """Register Algorithm Private Key class. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - *algorithm_cls*: A `GenericPrivateKey` class. - - *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms. - - *oid*: an optional BER-encoded `bytes` for PRIVATEOID algorithms. - - Raises ``ValueError`` if a name or oid is specified incorrectly.
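[Editor's note: the lookup half of this registry can be exercised directly; a sketch, assuming the cryptography package is installed so the algorithms table above is populated. generate() is defined on the concrete key classes, not on the GenericPrivateKey base.]

import dns.dnssecalgs
from dns.dnssectypes import Algorithm

# Map an algorithm number to its private-key implementation class.
cls = dns.dnssecalgs.get_algorithm_cls(Algorithm.ED25519)
print(cls.__name__)  # PrivateED25519

# Generate a key pair and render the public half as DNSKEY rdata.
private = cls.generate()
print(private.public_key().to_dnskey())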
- """ - if not issubclass(algorithm_cls, GenericPrivateKey): - raise TypeError("Invalid algorithm class") - algorithm = Algorithm.make(algorithm) - prefix: AlgorithmPrefix = None - if algorithm == Algorithm.PRIVATEDNS: - if name is None: - raise ValueError("Name required for PRIVATEDNS algorithms") - if isinstance(name, str): - name = dns.name.from_text(name) - prefix = name - elif algorithm == Algorithm.PRIVATEOID: - if oid is None: - raise ValueError("OID required for PRIVATEOID algorithms") - prefix = bytes([len(oid)]) + oid - elif name: - raise ValueError("Name only supported for PRIVATEDNS algorithm") - elif oid: - raise ValueError("OID only supported for PRIVATEOID algorithm") - algorithms[(algorithm, prefix)] = algorithm_cls diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 037985ac..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 51f13d74..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc deleted file mode 100644 index 9feab16e..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc deleted file mode 100644 index d6240e25..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc deleted file mode 100644 index 8ce912a3..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc deleted file mode 100644 index 9ae096e7..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc deleted file mode 100644 index 67c032a8..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py deleted file mode 100644 index 752ee480..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/base.py +++ /dev/null @@ -1,89 +0,0 @@ -from abc import ABC, abstractmethod # pylint: disable=no-name-in-module 
-from typing import Any, Optional, Type - -import dns.rdataclass -import dns.rdatatype -from dns.dnssectypes import Algorithm -from dns.exception import AlgorithmKeyMismatch -from dns.rdtypes.ANY.DNSKEY import DNSKEY -from dns.rdtypes.dnskeybase import Flag - - -class GenericPublicKey(ABC): - algorithm: Algorithm - - @abstractmethod - def __init__(self, key: Any) -> None: - pass - - @abstractmethod - def verify(self, signature: bytes, data: bytes) -> None: - """Verify signed DNSSEC data""" - - @abstractmethod - def encode_key_bytes(self) -> bytes: - """Encode key as bytes for DNSKEY""" - - @classmethod - def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None: - if key.algorithm != cls.algorithm: - raise AlgorithmKeyMismatch - - def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY: - """Return public key as DNSKEY""" - return DNSKEY( - rdclass=dns.rdataclass.IN, - rdtype=dns.rdatatype.DNSKEY, - flags=flags, - protocol=protocol, - algorithm=self.algorithm, - key=self.encode_key_bytes(), - ) - - @classmethod - @abstractmethod - def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey": - """Create public key from DNSKEY""" - - @classmethod - @abstractmethod - def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": - """Create public key from PEM-encoded SubjectPublicKeyInfo as specified - in RFC 5280""" - - @abstractmethod - def to_pem(self) -> bytes: - """Return public-key as PEM-encoded SubjectPublicKeyInfo as specified - in RFC 5280""" - - -class GenericPrivateKey(ABC): - public_cls: Type[GenericPublicKey] - - @abstractmethod - def __init__(self, key: Any) -> None: - pass - - @abstractmethod - def sign( - self, - data: bytes, - verify: bool = False, - deterministic: bool = True, - ) -> bytes: - """Sign DNSSEC data""" - - @abstractmethod - def public_key(self) -> "GenericPublicKey": - """Return public key instance""" - - @classmethod - @abstractmethod - def from_pem( - cls, private_pem: bytes, password: Optional[bytes] = None - ) -> "GenericPrivateKey": - """Create private key from PEM-encoded PKCS#8""" - - @abstractmethod - def to_pem(self, password: Optional[bytes] = None) -> bytes: - """Return private key as PEM-encoded PKCS#8""" diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py deleted file mode 100644 index 5a31a812..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/cryptography.py +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any, Optional, Type - -from cryptography.hazmat.primitives import serialization - -from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey -from dns.exception import AlgorithmKeyMismatch - - -class CryptographyPublicKey(GenericPublicKey): - key: Any = None - key_cls: Any = None - - def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called - if self.key_cls is None: - raise TypeError("Undefined private key class") - if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type - key, self.key_cls - ): - raise AlgorithmKeyMismatch - self.key = key - - @classmethod - def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": - key = serialization.load_pem_public_key(public_pem) - return cls(key=key) - - def to_pem(self) -> bytes: - return self.key.public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - ) - - -class CryptographyPrivateKey(GenericPrivateKey): - key: Any = None - key_cls: Any = None - public_cls: 
Type[CryptographyPublicKey] - - def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called - if self.key_cls is None: - raise TypeError("Undefined private key class") - if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type - key, self.key_cls - ): - raise AlgorithmKeyMismatch - self.key = key - - def public_key(self) -> "CryptographyPublicKey": - return self.public_cls(key=self.key.public_key()) - - @classmethod - def from_pem( - cls, private_pem: bytes, password: Optional[bytes] = None - ) -> "GenericPrivateKey": - key = serialization.load_pem_private_key(private_pem, password=password) - return cls(key=key) - - def to_pem(self, password: Optional[bytes] = None) -> bytes: - encryption_algorithm: serialization.KeySerializationEncryption - if password: - encryption_algorithm = serialization.BestAvailableEncryption(password) - else: - encryption_algorithm = serialization.NoEncryption() - return self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=encryption_algorithm, - ) diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py deleted file mode 100644 index adca3def..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/dsa.py +++ /dev/null @@ -1,106 +0,0 @@ -import struct - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import dsa, utils - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicDSA(CryptographyPublicKey): - key: dsa.DSAPublicKey - key_cls = dsa.DSAPublicKey - algorithm = Algorithm.DSA - chosen_hash = hashes.SHA1() - - def verify(self, signature: bytes, data: bytes) -> None: - sig_r = signature[1:21] - sig_s = signature[21:] - sig = utils.encode_dss_signature( - int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") - ) - self.key.verify(sig, data, self.chosen_hash) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 2536, section 2.""" - pn = self.key.public_numbers() - dsa_t = (self.key.key_size // 8 - 64) // 8 - if dsa_t > 8: - raise ValueError("unsupported DSA key size") - octets = 64 + dsa_t * 8 - res = struct.pack("!B", dsa_t) - res += pn.parameter_numbers.q.to_bytes(20, "big") - res += pn.parameter_numbers.p.to_bytes(octets, "big") - res += pn.parameter_numbers.g.to_bytes(octets, "big") - res += pn.y.to_bytes(octets, "big") - return res - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicDSA": - cls._ensure_algorithm_key_combination(key) - keyptr = key.key - (t,) = struct.unpack("!B", keyptr[0:1]) - keyptr = keyptr[1:] - octets = 64 + t * 8 - dsa_q = keyptr[0:20] - keyptr = keyptr[20:] - dsa_p = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_g = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_y = keyptr[0:octets] - return cls( - key=dsa.DSAPublicNumbers( # type: ignore - int.from_bytes(dsa_y, "big"), - dsa.DSAParameterNumbers( - int.from_bytes(dsa_p, "big"), - int.from_bytes(dsa_q, "big"), - int.from_bytes(dsa_g, "big"), - ), - ).public_key(default_backend()), - ) - - -class PrivateDSA(CryptographyPrivateKey): - key: dsa.DSAPrivateKey - key_cls = dsa.DSAPrivateKey - public_cls = PublicDSA - - def sign( - self, - data: bytes, - verify: bool = False, - deterministic: bool = True, - ) -> bytes: - 
"""Sign using a private key per RFC 2536, section 3.""" - public_dsa_key = self.key.public_key() - if public_dsa_key.key_size > 1024: - raise ValueError("DSA key size overflow") - der_signature = self.key.sign(data, self.public_cls.chosen_hash) - dsa_r, dsa_s = utils.decode_dss_signature(der_signature) - dsa_t = (public_dsa_key.key_size // 8 - 64) // 8 - octets = 20 - signature = ( - struct.pack("!B", dsa_t) - + int.to_bytes(dsa_r, length=octets, byteorder="big") - + int.to_bytes(dsa_s, length=octets, byteorder="big") - ) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls, key_size: int) -> "PrivateDSA": - return cls( - key=dsa.generate_private_key(key_size=key_size), - ) - - -class PublicDSANSEC3SHA1(PublicDSA): - algorithm = Algorithm.DSANSEC3SHA1 - - -class PrivateDSANSEC3SHA1(PrivateDSA): - public_cls = PublicDSANSEC3SHA1 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py deleted file mode 100644 index 86d5764c..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/ecdsa.py +++ /dev/null @@ -1,97 +0,0 @@ -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ec, utils - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicECDSA(CryptographyPublicKey): - key: ec.EllipticCurvePublicKey - key_cls = ec.EllipticCurvePublicKey - algorithm: Algorithm - chosen_hash: hashes.HashAlgorithm - curve: ec.EllipticCurve - octets: int - - def verify(self, signature: bytes, data: bytes) -> None: - sig_r = signature[0 : self.octets] - sig_s = signature[self.octets :] - sig = utils.encode_dss_signature( - int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") - ) - self.key.verify(sig, data, ec.ECDSA(self.chosen_hash)) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 6605, section 4.""" - pn = self.key.public_numbers() - return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big") - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA": - cls._ensure_algorithm_key_combination(key) - ecdsa_x = key.key[0 : cls.octets] - ecdsa_y = key.key[cls.octets : cls.octets * 2] - return cls( - key=ec.EllipticCurvePublicNumbers( - curve=cls.curve, - x=int.from_bytes(ecdsa_x, "big"), - y=int.from_bytes(ecdsa_y, "big"), - ).public_key(default_backend()), - ) - - -class PrivateECDSA(CryptographyPrivateKey): - key: ec.EllipticCurvePrivateKey - key_cls = ec.EllipticCurvePrivateKey - public_cls = PublicECDSA - - def sign( - self, - data: bytes, - verify: bool = False, - deterministic: bool = True, - ) -> bytes: - """Sign using a private key per RFC 6605, section 4.""" - algorithm = ec.ECDSA( - self.public_cls.chosen_hash, deterministic_signing=deterministic - ) - der_signature = self.key.sign(data, algorithm) - dsa_r, dsa_s = utils.decode_dss_signature(der_signature) - signature = int.to_bytes( - dsa_r, length=self.public_cls.octets, byteorder="big" - ) + int.to_bytes(dsa_s, length=self.public_cls.octets, byteorder="big") - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls) -> "PrivateECDSA": - return cls( - key=ec.generate_private_key( - curve=cls.public_cls.curve, backend=default_backend() - ), - ) - - -class 
PublicECDSAP256SHA256(PublicECDSA): - algorithm = Algorithm.ECDSAP256SHA256 - chosen_hash = hashes.SHA256() - curve = ec.SECP256R1() - octets = 32 - - -class PrivateECDSAP256SHA256(PrivateECDSA): - public_cls = PublicECDSAP256SHA256 - - -class PublicECDSAP384SHA384(PublicECDSA): - algorithm = Algorithm.ECDSAP384SHA384 - chosen_hash = hashes.SHA384() - curve = ec.SECP384R1() - octets = 48 - - -class PrivateECDSAP384SHA384(PrivateECDSA): - public_cls = PublicECDSAP384SHA384 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py deleted file mode 100644 index 604bcbfe..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/eddsa.py +++ /dev/null @@ -1,70 +0,0 @@ -from typing import Type - -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import ed448, ed25519 - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicEDDSA(CryptographyPublicKey): - def verify(self, signature: bytes, data: bytes) -> None: - self.key.verify(signature, data) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 8080, section 3.""" - return self.key.public_bytes( - encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw - ) - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA": - cls._ensure_algorithm_key_combination(key) - return cls( - key=cls.key_cls.from_public_bytes(key.key), - ) - - -class PrivateEDDSA(CryptographyPrivateKey): - public_cls: Type[PublicEDDSA] - - def sign( - self, - data: bytes, - verify: bool = False, - deterministic: bool = True, - ) -> bytes: - """Sign using a private key per RFC 8080, section 4.""" - signature = self.key.sign(data) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls) -> "PrivateEDDSA": - return cls(key=cls.key_cls.generate()) - - -class PublicED25519(PublicEDDSA): - key: ed25519.Ed25519PublicKey - key_cls = ed25519.Ed25519PublicKey - algorithm = Algorithm.ED25519 - - -class PrivateED25519(PrivateEDDSA): - key: ed25519.Ed25519PrivateKey - key_cls = ed25519.Ed25519PrivateKey - public_cls = PublicED25519 - - -class PublicED448(PublicEDDSA): - key: ed448.Ed448PublicKey - key_cls = ed448.Ed448PublicKey - algorithm = Algorithm.ED448 - - -class PrivateED448(PrivateEDDSA): - key: ed448.Ed448PrivateKey - key_cls = ed448.Ed448PrivateKey - public_cls = PublicED448 diff --git a/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py b/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py deleted file mode 100644 index 27537aad..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssecalgs/rsa.py +++ /dev/null @@ -1,124 +0,0 @@ -import math -import struct - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding, rsa - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicRSA(CryptographyPublicKey): - key: rsa.RSAPublicKey - key_cls = rsa.RSAPublicKey - algorithm: Algorithm - chosen_hash: hashes.HashAlgorithm - - def verify(self, signature: bytes, data: bytes) -> None: - self.key.verify(signature, data, padding.PKCS1v15(), self.chosen_hash) - - def 
encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 3110, section 2.""" - pn = self.key.public_numbers() - _exp_len = math.ceil(int.bit_length(pn.e) / 8) - exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big") - if _exp_len > 255: - exp_header = b"\0" + struct.pack("!H", _exp_len) - else: - exp_header = struct.pack("!B", _exp_len) - if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096: - raise ValueError("unsupported RSA key length") - return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big") - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicRSA": - cls._ensure_algorithm_key_combination(key) - keyptr = key.key - (bytes_,) = struct.unpack("!B", keyptr[0:1]) - keyptr = keyptr[1:] - if bytes_ == 0: - (bytes_,) = struct.unpack("!H", keyptr[0:2]) - keyptr = keyptr[2:] - rsa_e = keyptr[0:bytes_] - rsa_n = keyptr[bytes_:] - return cls( - key=rsa.RSAPublicNumbers( - int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big") - ).public_key(default_backend()) - ) - - -class PrivateRSA(CryptographyPrivateKey): - key: rsa.RSAPrivateKey - key_cls = rsa.RSAPrivateKey - public_cls = PublicRSA - default_public_exponent = 65537 - - def sign( - self, - data: bytes, - verify: bool = False, - deterministic: bool = True, - ) -> bytes: - """Sign using a private key per RFC 3110, section 3.""" - signature = self.key.sign(data, padding.PKCS1v15(), self.public_cls.chosen_hash) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls, key_size: int) -> "PrivateRSA": - return cls( - key=rsa.generate_private_key( - public_exponent=cls.default_public_exponent, - key_size=key_size, - backend=default_backend(), - ) - ) - - -class PublicRSAMD5(PublicRSA): - algorithm = Algorithm.RSAMD5 - chosen_hash = hashes.MD5() - - -class PrivateRSAMD5(PrivateRSA): - public_cls = PublicRSAMD5 - - -class PublicRSASHA1(PublicRSA): - algorithm = Algorithm.RSASHA1 - chosen_hash = hashes.SHA1() - - -class PrivateRSASHA1(PrivateRSA): - public_cls = PublicRSASHA1 - - -class PublicRSASHA1NSEC3SHA1(PublicRSA): - algorithm = Algorithm.RSASHA1NSEC3SHA1 - chosen_hash = hashes.SHA1() - - -class PrivateRSASHA1NSEC3SHA1(PrivateRSA): - public_cls = PublicRSASHA1NSEC3SHA1 - - -class PublicRSASHA256(PublicRSA): - algorithm = Algorithm.RSASHA256 - chosen_hash = hashes.SHA256() - - -class PrivateRSASHA256(PrivateRSA): - public_cls = PublicRSASHA256 - - -class PublicRSASHA512(PublicRSA): - algorithm = Algorithm.RSASHA512 - chosen_hash = hashes.SHA512() - - -class PrivateRSASHA512(PrivateRSA): - public_cls = PublicRSASHA512 diff --git a/venv/lib/python3.12/site-packages/dns/dnssectypes.py b/venv/lib/python3.12/site-packages/dns/dnssectypes.py deleted file mode 100644 index 02131e0a..00000000 --- a/venv/lib/python3.12/site-packages/dns/dnssectypes.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
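For reviewers sanity-checking the DNSSEC code being removed above: a minimal sketch of generating and using one of the RSA key classes from dns.dnssecalgs.rsa (assumes dnspython and its optional "cryptography" dependency are installed; the 2048-bit size is an arbitrary choice, not anything from this change):

import dns.dnssecalgs.rsa

# A sketch, not part of the change: generate an RSASHA256 key pair
# using the classes deleted above.
private = dns.dnssecalgs.rsa.PrivateRSASHA256.generate(key_size=2048)

# Sign some data; verify=True round-trips through the public key.
signature = private.sign(b"example data", verify=True)

# RFC 3110 wire encoding of the public key, as carried in DNSKEY RDATA.
dnskey_material = private.public_key().encode_key_bytes()
print(len(signature), len(dnskey_material))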
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNSSEC-related types.""" - -# This is a separate file to avoid import circularity between dns.dnssec and -# the implementations of the DS and DNSKEY types. - -import dns.enum - - -class Algorithm(dns.enum.IntEnum): - RSAMD5 = 1 - DH = 2 - DSA = 3 - ECC = 4 - RSASHA1 = 5 - DSANSEC3SHA1 = 6 - RSASHA1NSEC3SHA1 = 7 - RSASHA256 = 8 - RSASHA512 = 10 - ECCGOST = 12 - ECDSAP256SHA256 = 13 - ECDSAP384SHA384 = 14 - ED25519 = 15 - ED448 = 16 - INDIRECT = 252 - PRIVATEDNS = 253 - PRIVATEOID = 254 - - @classmethod - def _maximum(cls): - return 255 - - -class DSDigest(dns.enum.IntEnum): - """DNSSEC Delegation Signer Digest Algorithm""" - - NULL = 0 - SHA1 = 1 - SHA256 = 2 - GOST = 3 - SHA384 = 4 - - @classmethod - def _maximum(cls): - return 255 - - -class NSEC3Hash(dns.enum.IntEnum): - """NSEC3 hash algorithm""" - - SHA1 = 1 - - @classmethod - def _maximum(cls): - return 255 diff --git a/venv/lib/python3.12/site-packages/dns/e164.py b/venv/lib/python3.12/site-packages/dns/e164.py deleted file mode 100644 index 453736d4..00000000 --- a/venv/lib/python3.12/site-packages/dns/e164.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS E.164 helpers.""" - -from typing import Iterable, Optional, Union - -import dns.exception -import dns.name -import dns.resolver - -#: The public E.164 domain. -public_enum_domain = dns.name.from_text("e164.arpa.") - - -def from_e164( - text: str, origin: Optional[dns.name.Name] = public_enum_domain -) -> dns.name.Name: - """Convert an E.164 number in textual form into a Name object whose - value is the ENUM domain name for that number. - - Non-digits in the text are ignored, i.e. "16505551212", - "+1.650.555.1212" and "1 (650) 555-1212" are all the same. - - *text*, a ``str``, is an E.164 number in textual form. - - *origin*, a ``dns.name.Name``, the domain in which the number - should be constructed. The default is ``e164.arpa.``. - - Returns a ``dns.name.Name``. - """ - - parts = [d for d in text if d.isdigit()] - parts.reverse() - return dns.name.from_text(".".join(parts), origin=origin) - - -def to_e164( - name: dns.name.Name, - origin: Optional[dns.name.Name] = public_enum_domain, - want_plus_prefix: bool = True, -) -> str: - """Convert an ENUM domain name into an E.164 number. 
- - Note that dnspython does not have any information about preferred - number formats within national numbering plans, so all numbers are - emitted as a simple string of digits, prefixed by a '+' (unless - *want_plus_prefix* is ``False``). - - *name* is a ``dns.name.Name``, the ENUM domain name. - - *origin* is a ``dns.name.Name``, a domain containing the ENUM - domain name. The name is relativized to this domain before being - converted to text. If ``None``, no relativization is done. - - *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of - the returned number. - - Returns a ``str``. - - """ - if origin is not None: - name = name.relativize(origin) - dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] - if len(dlabels) != len(name.labels): - raise dns.exception.SyntaxError("non-digit labels in ENUM domain name") - dlabels.reverse() - text = b"".join(dlabels) - if want_plus_prefix: - text = b"+" + text - return text.decode() - - -def query( - number: str, - domains: Iterable[Union[dns.name.Name, str]], - resolver: Optional[dns.resolver.Resolver] = None, -) -> dns.resolver.Answer: - """Look for NAPTR RRs for the specified number in the specified domains. - - e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) - - *number*, a ``str`` is the number to look for. - - *domains* is an iterable containing ``dns.name.Name`` values. - - *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If - ``None``, the default resolver is used. - """ - - if resolver is None: - resolver = dns.resolver.get_default_resolver() - e_nx = dns.resolver.NXDOMAIN() - for domain in domains: - if isinstance(domain, str): - domain = dns.name.from_text(domain) - qname = dns.e164.from_e164(number, domain) - try: - return resolver.resolve(qname, "NAPTR") - except dns.resolver.NXDOMAIN as e: - e_nx += e - raise e_nx diff --git a/venv/lib/python3.12/site-packages/dns/edns.py b/venv/lib/python3.12/site-packages/dns/edns.py deleted file mode 100644 index f7d9ff99..00000000 --- a/venv/lib/python3.12/site-packages/dns/edns.py +++ /dev/null @@ -1,572 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
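For context, the dns.e164 helpers whose deletion ends above convert between E.164 numbers and their ENUM domain names; a minimal usage sketch (assumes dnspython is installed):

import dns.e164

# Non-digits ('+', '.', spaces) are ignored on input.
name = dns.e164.from_e164("+1 650 555 1212")
print(name)                    # 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
print(dns.e164.to_e164(name))  # +16505551212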
- -"""EDNS Options""" - -import binascii -import math -import socket -import struct -from typing import Any, Dict, Optional, Union - -import dns.enum -import dns.inet -import dns.rdata -import dns.wire - - -class OptionType(dns.enum.IntEnum): - #: NSID - NSID = 3 - #: DAU - DAU = 5 - #: DHU - DHU = 6 - #: N3U - N3U = 7 - #: ECS (client-subnet) - ECS = 8 - #: EXPIRE - EXPIRE = 9 - #: COOKIE - COOKIE = 10 - #: KEEPALIVE - KEEPALIVE = 11 - #: PADDING - PADDING = 12 - #: CHAIN - CHAIN = 13 - #: EDE (extended-dns-error) - EDE = 15 - #: REPORTCHANNEL - REPORTCHANNEL = 18 - - @classmethod - def _maximum(cls): - return 65535 - - -class Option: - """Base class for all EDNS option types.""" - - def __init__(self, otype: Union[OptionType, str]): - """Initialize an option. - - *otype*, a ``dns.edns.OptionType``, is the option type. - """ - self.otype = OptionType.make(otype) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - """Convert an option to wire format. - - Returns a ``bytes`` or ``None``. - - """ - raise NotImplementedError # pragma: no cover - - def to_text(self) -> str: - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option": - """Build an EDNS option object from wire format. - - *otype*, a ``dns.edns.OptionType``, is the option type. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restructed to the option length. - - Returns a ``dns.edns.Option``. - """ - raise NotImplementedError # pragma: no cover - - def _cmp(self, other): - """Compare an EDNS option with another option of the same type. - - Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. - """ - wire = self.to_wire() - owire = other.to_wire() - if wire == owire: - return 0 - if wire > owire: - return 1 - return -1 - - def __eq__(self, other): - if not isinstance(other, Option): - return False - if self.otype != other.otype: - return False - return self._cmp(other) == 0 - - def __ne__(self, other): - if not isinstance(other, Option): - return True - if self.otype != other.otype: - return True - return self._cmp(other) != 0 - - def __lt__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) > 0 - - def __str__(self): - return self.to_text() - - -class GenericOption(Option): # lgtm[py/missing-equals] - """Generic Option Class - - This class is used for EDNS option types for which we have no better - implementation. 
- """ - - def __init__(self, otype: Union[OptionType, str], data: Union[bytes, str]): - super().__init__(otype) - self.data = dns.rdata.Rdata._as_bytes(data, True) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - if file: - file.write(self.data) - return None - else: - return self.data - - def to_text(self) -> str: - return "Generic %d" % self.otype - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - return cls(otype, parser.get_remaining()) - - -class ECSOption(Option): # lgtm[py/missing-equals] - """EDNS Client Subnet (ECS, RFC7871)""" - - def __init__(self, address: str, srclen: Optional[int] = None, scopelen: int = 0): - """*address*, a ``str``, is the client address information. - - *srclen*, an ``int``, the source prefix length, which is the - leftmost number of bits of the address to be used for the - lookup. The default is 24 for IPv4 and 56 for IPv6. - - *scopelen*, an ``int``, the scope prefix length. This value - must be 0 in queries, and should be set in responses. - """ - - super().__init__(OptionType.ECS) - af = dns.inet.af_for_address(address) - - if af == socket.AF_INET6: - self.family = 2 - if srclen is None: - srclen = 56 - address = dns.rdata.Rdata._as_ipv6_address(address) - srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) - scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) - elif af == socket.AF_INET: - self.family = 1 - if srclen is None: - srclen = 24 - address = dns.rdata.Rdata._as_ipv4_address(address) - srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) - scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) - else: # pragma: no cover (this will never happen) - raise ValueError("Bad address family") - - assert srclen is not None - self.address = address - self.srclen = srclen - self.scopelen = scopelen - - addrdata = dns.inet.inet_pton(af, address) - nbytes = int(math.ceil(srclen / 8.0)) - - # Truncate to srclen and pad to the end of the last octet needed - # See RFC section 6 - self.addrdata = addrdata[:nbytes] - nbits = srclen % 8 - if nbits != 0: - last = struct.pack("B", ord(self.addrdata[-1:]) & (0xFF << (8 - nbits))) - self.addrdata = self.addrdata[:-1] + last - - def to_text(self) -> str: - return f"ECS {self.address}/{self.srclen} scope/{self.scopelen}" - - @staticmethod - def from_text(text: str) -> Option: - """Convert a string into a `dns.edns.ECSOption` - - *text*, a `str`, the text form of the option. - - Returns a `dns.edns.ECSOption`. 
- - Examples: - - >>> import dns.edns - >>> - >>> # basic example - >>> dns.edns.ECSOption.from_text('1.2.3.4/24') - >>> - >>> # also understands scope - >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') - >>> - >>> # IPv6 - >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') - >>> - >>> # it understands results from `dns.edns.ECSOption.to_text()` - >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') - """ - optional_prefix = "ECS" - tokens = text.split() - ecs_text = None - if len(tokens) == 1: - ecs_text = tokens[0] - elif len(tokens) == 2: - if tokens[0] != optional_prefix: - raise ValueError(f'could not parse ECS from "{text}"') - ecs_text = tokens[1] - else: - raise ValueError(f'could not parse ECS from "{text}"') - n_slashes = ecs_text.count("/") - if n_slashes == 1: - address, tsrclen = ecs_text.split("/") - tscope = "0" - elif n_slashes == 2: - address, tsrclen, tscope = ecs_text.split("/") - else: - raise ValueError(f'could not parse ECS from "{text}"') - try: - scope = int(tscope) - except ValueError: - raise ValueError("invalid scope " + f'"{tscope}": scope must be an integer') - try: - srclen = int(tsrclen) - except ValueError: - raise ValueError( - "invalid srclen " + f'"{tsrclen}": srclen must be an integer' - ) - return ECSOption(address, srclen, scope) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - value = ( - struct.pack("!HBB", self.family, self.srclen, self.scopelen) + self.addrdata - ) - if file: - file.write(value) - return None - else: - return value - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - family, src, scope = parser.get_struct("!HBB") - addrlen = int(math.ceil(src / 8.0)) - prefix = parser.get_bytes(addrlen) - if family == 1: - pad = 4 - addrlen - addr = dns.ipv4.inet_ntoa(prefix + b"\x00" * pad) - elif family == 2: - pad = 16 - addrlen - addr = dns.ipv6.inet_ntoa(prefix + b"\x00" * pad) - else: - raise ValueError("unsupported family") - - return cls(addr, src, scope) - - -class EDECode(dns.enum.IntEnum): - OTHER = 0 - UNSUPPORTED_DNSKEY_ALGORITHM = 1 - UNSUPPORTED_DS_DIGEST_TYPE = 2 - STALE_ANSWER = 3 - FORGED_ANSWER = 4 - DNSSEC_INDETERMINATE = 5 - DNSSEC_BOGUS = 6 - SIGNATURE_EXPIRED = 7 - SIGNATURE_NOT_YET_VALID = 8 - DNSKEY_MISSING = 9 - RRSIGS_MISSING = 10 - NO_ZONE_KEY_BIT_SET = 11 - NSEC_MISSING = 12 - CACHED_ERROR = 13 - NOT_READY = 14 - BLOCKED = 15 - CENSORED = 16 - FILTERED = 17 - PROHIBITED = 18 - STALE_NXDOMAIN_ANSWER = 19 - NOT_AUTHORITATIVE = 20 - NOT_SUPPORTED = 21 - NO_REACHABLE_AUTHORITY = 22 - NETWORK_ERROR = 23 - INVALID_DATA = 24 - - @classmethod - def _maximum(cls): - return 65535 - - -class EDEOption(Option): # lgtm[py/missing-equals] - """Extended DNS Error (EDE, RFC8914)""" - - _preserve_case = {"DNSKEY", "DS", "DNSSEC", "RRSIGs", "NSEC", "NXDOMAIN"} - - def __init__(self, code: Union[EDECode, str], text: Optional[str] = None): - """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the - extended error. - - *text*, a ``str`` or ``None``, specifying additional information about - the error. 
- """ - - super().__init__(OptionType.EDE) - - self.code = EDECode.make(code) - if text is not None and not isinstance(text, str): - raise ValueError("text must be string or None") - self.text = text - - def to_text(self) -> str: - output = f"EDE {self.code}" - if self.code in EDECode: - desc = EDECode.to_text(self.code) - desc = " ".join( - word if word in self._preserve_case else word.title() - for word in desc.split("_") - ) - output += f" ({desc})" - if self.text is not None: - output += f": {self.text}" - return output - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - value = struct.pack("!H", self.code) - if self.text is not None: - value += self.text.encode("utf8") - - if file: - file.write(value) - return None - else: - return value - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - code = EDECode.make(parser.get_uint16()) - text = parser.get_remaining() - - if text: - if text[-1] == 0: # text MAY be null-terminated - text = text[:-1] - btext = text.decode("utf8") - else: - btext = None - - return cls(code, btext) - - -class NSIDOption(Option): - def __init__(self, nsid: bytes): - super().__init__(OptionType.NSID) - self.nsid = nsid - - def to_wire(self, file: Any = None) -> Optional[bytes]: - if file: - file.write(self.nsid) - return None - else: - return self.nsid - - def to_text(self) -> str: - if all(c >= 0x20 and c <= 0x7E for c in self.nsid): - # All ASCII printable, so it's probably a string. - value = self.nsid.decode() - else: - value = binascii.hexlify(self.nsid).decode() - return f"NSID {value}" - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: dns.wire.Parser - ) -> Option: - return cls(parser.get_remaining()) - - -class CookieOption(Option): - def __init__(self, client: bytes, server: bytes): - super().__init__(dns.edns.OptionType.COOKIE) - self.client = client - self.server = server - if len(client) != 8: - raise ValueError("client cookie must be 8 bytes") - if len(server) != 0 and (len(server) < 8 or len(server) > 32): - raise ValueError("server cookie must be empty or between 8 and 32 bytes") - - def to_wire(self, file: Any = None) -> Optional[bytes]: - if file: - file.write(self.client) - if len(self.server) > 0: - file.write(self.server) - return None - else: - return self.client + self.server - - def to_text(self) -> str: - client = binascii.hexlify(self.client).decode() - if len(self.server) > 0: - server = binascii.hexlify(self.server).decode() - else: - server = "" - return f"COOKIE {client}{server}" - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: dns.wire.Parser - ) -> Option: - return cls(parser.get_bytes(8), parser.get_remaining()) - - -class ReportChannelOption(Option): - # RFC 9567 - def __init__(self, agent_domain: dns.name.Name): - super().__init__(OptionType.REPORTCHANNEL) - self.agent_domain = agent_domain - - def to_wire(self, file: Any = None) -> Optional[bytes]: - return self.agent_domain.to_wire(file) - - def to_text(self) -> str: - return "REPORTCHANNEL " + self.agent_domain.to_text() - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: dns.wire.Parser - ) -> Option: - return cls(parser.get_name()) - - -_type_to_class: Dict[OptionType, Any] = { - OptionType.ECS: ECSOption, - OptionType.EDE: EDEOption, - OptionType.NSID: NSIDOption, - OptionType.COOKIE: CookieOption, - OptionType.REPORTCHANNEL: ReportChannelOption, -} - - -def 
get_option_class(otype: OptionType) -> Any: - """Return the class for the specified option type. - - The GenericOption class is used if a more specific class is not - known. - """ - - cls = _type_to_class.get(otype) - if cls is None: - cls = GenericOption - return cls - - -def option_from_wire_parser( - otype: Union[OptionType, str], parser: "dns.wire.Parser" -) -> Option: - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restricted to the option length. - - Returns an instance of a subclass of ``dns.edns.Option``. - """ - otype = OptionType.make(otype) - cls = get_option_class(otype) - return cls.from_wire_parser(otype, parser) - - -def option_from_wire( - otype: Union[OptionType, str], wire: bytes, current: int, olen: int -) -> Option: - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *wire*, a ``bytes``, is the wire-format message. - - *current*, an ``int``, is the offset in *wire* of the beginning - of the rdata. - - *olen*, an ``int``, is the length of the wire-format option data - - Returns an instance of a subclass of ``dns.edns.Option``. - """ - parser = dns.wire.Parser(wire, current) - with parser.restrict_to(olen): - return option_from_wire_parser(otype, parser) - - -def register_type(implementation: Any, otype: OptionType) -> None: - """Register the implementation of an option type. - - *implementation*, a ``class``, is a subclass of ``dns.edns.Option``. - - *otype*, an ``int``, is the option type. - """ - - _type_to_class[otype] = implementation - - -### BEGIN generated OptionType constants - -NSID = OptionType.NSID -DAU = OptionType.DAU -DHU = OptionType.DHU -N3U = OptionType.N3U -ECS = OptionType.ECS -EXPIRE = OptionType.EXPIRE -COOKIE = OptionType.COOKIE -KEEPALIVE = OptionType.KEEPALIVE -PADDING = OptionType.PADDING -CHAIN = OptionType.CHAIN -EDE = OptionType.EDE -REPORTCHANNEL = OptionType.REPORTCHANNEL - -### END generated OptionType constants diff --git a/venv/lib/python3.12/site-packages/dns/entropy.py b/venv/lib/python3.12/site-packages/dns/entropy.py deleted file mode 100644 index 4dcdc627..00000000 --- a/venv/lib/python3.12/site-packages/dns/entropy.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import hashlib -import os -import random -import threading -import time -from typing import Any, Optional - - -class EntropyPool: - # This is an entropy pool for Python implementations that do not - # have a working SystemRandom. I'm not sure there are any, but - # leaving this code doesn't hurt anything as the library code - # is used if present. 
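The EDNS option classes deleted above are normally attached to a query via dns.message (make_query is part of the same package); a small sketch:

import dns.edns
import dns.message

# Client-subnet option built from text, per the from_text() docstring above.
ecs = dns.edns.ECSOption.from_text("1.2.3.4/24")
print(ecs.to_text())  # ECS 1.2.3.4/24 scope/0

# Attach it to an EDNS0 query.
query = dns.message.make_query("example.com.", "A", use_edns=0, options=[ecs])

# Extended DNS Errors render with a human-readable description.
ede = dns.edns.EDEOption(dns.edns.EDECode.STALE_ANSWER, "refreshing")
print(ede.to_text())  # e.g. EDE 3 (Stale Answer): refreshing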
- - def __init__(self, seed: Optional[bytes] = None): - self.pool_index = 0 - self.digest: Optional[bytearray] = None - self.next_byte = 0 - self.lock = threading.Lock() - self.hash = hashlib.sha1() - self.hash_len = 20 - self.pool = bytearray(b"\0" * self.hash_len) - if seed is not None: - self._stir(seed) - self.seeded = True - self.seed_pid = os.getpid() - else: - self.seeded = False - self.seed_pid = 0 - - def _stir(self, entropy: bytes) -> None: - for c in entropy: - if self.pool_index == self.hash_len: - self.pool_index = 0 - b = c & 0xFF - self.pool[self.pool_index] ^= b - self.pool_index += 1 - - def stir(self, entropy: bytes) -> None: - with self.lock: - self._stir(entropy) - - def _maybe_seed(self) -> None: - if not self.seeded or self.seed_pid != os.getpid(): - try: - seed = os.urandom(16) - except Exception: # pragma: no cover - try: - with open("/dev/urandom", "rb", 0) as r: - seed = r.read(16) - except Exception: - seed = str(time.time()).encode() - self.seeded = True - self.seed_pid = os.getpid() - self.digest = None - seed = bytearray(seed) - self._stir(seed) - - def random_8(self) -> int: - with self.lock: - self._maybe_seed() - if self.digest is None or self.next_byte == self.hash_len: - self.hash.update(bytes(self.pool)) - self.digest = bytearray(self.hash.digest()) - self._stir(self.digest) - self.next_byte = 0 - value = self.digest[self.next_byte] - self.next_byte += 1 - return value - - def random_16(self) -> int: - return self.random_8() * 256 + self.random_8() - - def random_32(self) -> int: - return self.random_16() * 65536 + self.random_16() - - def random_between(self, first: int, last: int) -> int: - size = last - first + 1 - if size > 4294967296: - raise ValueError("too big") - if size > 65536: - rand = self.random_32 - max = 4294967295 - elif size > 256: - rand = self.random_16 - max = 65535 - else: - rand = self.random_8 - max = 255 - return first + size * rand() // (max + 1) - - -pool = EntropyPool() - -system_random: Optional[Any] -try: - system_random = random.SystemRandom() -except Exception: # pragma: no cover - system_random = None - - -def random_16() -> int: - if system_random is not None: - return system_random.randrange(0, 65536) - else: - return pool.random_16() - - -def between(first: int, last: int) -> int: - if system_random is not None: - return system_random.randrange(first, last + 1) - else: - return pool.random_between(first, last) diff --git a/venv/lib/python3.12/site-packages/dns/enum.py b/venv/lib/python3.12/site-packages/dns/enum.py deleted file mode 100644 index 71461f17..00000000 --- a/venv/lib/python3.12/site-packages/dns/enum.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
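A quick sketch of the public surface of the entropy module removed above (assumes dnspython is installed):

import dns.entropy

# The module-level helpers prefer random.SystemRandom and only fall
# back to the EntropyPool when it is unavailable.
print(dns.entropy.random_16())    # 0..65535, e.g. a DNS query id
print(dns.entropy.between(1, 6))  # inclusive on both ends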
- -import enum -from typing import Type, TypeVar, Union - -TIntEnum = TypeVar("TIntEnum", bound="IntEnum") - - -class IntEnum(enum.IntEnum): - @classmethod - def _missing_(cls, value): - cls._check_value(value) - val = int.__new__(cls, value) - val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}" - val._value_ = value - return val - - @classmethod - def _check_value(cls, value): - max = cls._maximum() - if not isinstance(value, int): - raise TypeError - if value < 0 or value > max: - name = cls._short_name() - raise ValueError(f"{name} must be an int between >= 0 and <= {max}") - - @classmethod - def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum: - text = text.upper() - try: - return cls[text] - except KeyError: - pass - value = cls._extra_from_text(text) - if value: - return value - prefix = cls._prefix() - if text.startswith(prefix) and text[len(prefix) :].isdigit(): - value = int(text[len(prefix) :]) - cls._check_value(value) - try: - return cls(value) - except ValueError: - return value - raise cls._unknown_exception_class() - - @classmethod - def to_text(cls: Type[TIntEnum], value: int) -> str: - cls._check_value(value) - try: - text = cls(value).name - except ValueError: - text = None - text = cls._extra_to_text(value, text) - if text is None: - text = f"{cls._prefix()}{value}" - return text - - @classmethod - def make(cls: Type[TIntEnum], value: Union[int, str]) -> TIntEnum: - """Convert text or a value into an enumerated type, if possible. - - *value*, the ``int`` or ``str`` to convert. - - Raises a class-specific exception if a ``str`` is provided that - cannot be converted. - - Raises ``ValueError`` if the value is out of range. - - Returns an enumeration from the calling class corresponding to the - value, if one is defined, or an ``int`` otherwise. - """ - - if isinstance(value, str): - return cls.from_text(value) - cls._check_value(value) - return cls(value) - - @classmethod - def _maximum(cls): - raise NotImplementedError # pragma: no cover - - @classmethod - def _short_name(cls): - return cls.__name__.lower() - - @classmethod - def _prefix(cls): - return "" - - @classmethod - def _extra_from_text(cls, text): # pylint: disable=W0613 - return None - - @classmethod - def _extra_to_text(cls, value, current_text): # pylint: disable=W0613 - return current_text - - @classmethod - def _unknown_exception_class(cls): - return ValueError diff --git a/venv/lib/python3.12/site-packages/dns/exception.py b/venv/lib/python3.12/site-packages/dns/exception.py deleted file mode 100644 index 223f2d68..00000000 --- a/venv/lib/python3.12/site-packages/dns/exception.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
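The IntEnum base deleted above is what gives types like dns.edns.OptionType their text/value conversions; a short sketch of the behavior it defines:

from dns.edns import OptionType

# make() accepts either text or a number; from_text()/to_text()
# convert explicitly.
assert OptionType.make("NSID") is OptionType.NSID
assert OptionType.make(8) is OptionType.ECS
assert OptionType.from_text("EDE") is OptionType.EDE
assert OptionType.to_text(15) == "EDE"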
- -"""Common DNS Exceptions. - -Dnspython modules may also define their own exceptions, which will -always be subclasses of ``DNSException``. -""" - - -from typing import Optional, Set - - -class DNSException(Exception): - """Abstract base class shared by all dnspython exceptions. - - It supports two basic modes of operation: - - a) Old/compatible mode is used if ``__init__`` was called with - empty *kwargs*. In compatible mode all *args* are passed - to the standard Python Exception class as before and all *args* are - printed by the standard ``__str__`` implementation. Class variable - ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` - if *args* is empty. - - b) New/parametrized mode is used if ``__init__`` was called with - non-empty *kwargs*. - In the new mode *args* must be empty and all kwargs must match - those set in class variable ``supp_kwargs``. All kwargs are stored inside - ``self.kwargs`` and used in a new ``__str__`` implementation to construct - a formatted message based on the ``fmt`` class variable, a ``string``. - - In the simplest case it is enough to override the ``supp_kwargs`` - and ``fmt`` class variables to get nice parametrized messages. - """ - - msg: Optional[str] = None # non-parametrized message - supp_kwargs: Set[str] = set() # accepted parameters for _fmt_kwargs (sanity check) - fmt: Optional[str] = None # message parametrized with results from _fmt_kwargs - - def __init__(self, *args, **kwargs): - self._check_params(*args, **kwargs) - if kwargs: - # This call to a virtual method from __init__ is ok in our usage - self.kwargs = self._check_kwargs(**kwargs) # lgtm[py/init-calls-subclass] - self.msg = str(self) - else: - self.kwargs = dict() # defined but empty for old mode exceptions - if self.msg is None: - # doc string is better implicit message than empty string - self.msg = self.__doc__ - if args: - super().__init__(*args) - else: - super().__init__(self.msg) - - def _check_params(self, *args, **kwargs): - """Old exceptions supported only args and not kwargs. - - For sanity we do not allow to mix old and new behavior.""" - if args or kwargs: - assert bool(args) != bool( - kwargs - ), "keyword arguments are mutually exclusive with positional args" - - def _check_kwargs(self, **kwargs): - if kwargs: - assert ( - set(kwargs.keys()) == self.supp_kwargs - ), f"following set of keyword args is required: {self.supp_kwargs}" - return kwargs - - def _fmt_kwargs(self, **kwargs): - """Format kwargs before printing them. - - Resulting dictionary has to have keys necessary for str.format call - on fmt class variable. 
- """ - fmtargs = {} - for kw, data in kwargs.items(): - if isinstance(data, (list, set)): - # convert list of to list of str() - fmtargs[kw] = list(map(str, data)) - if len(fmtargs[kw]) == 1: - # remove list brackets [] from single-item lists - fmtargs[kw] = fmtargs[kw].pop() - else: - fmtargs[kw] = data - return fmtargs - - def __str__(self): - if self.kwargs and self.fmt: - # provide custom message constructed from keyword arguments - fmtargs = self._fmt_kwargs(**self.kwargs) - return self.fmt.format(**fmtargs) - else: - # print *args directly in the same way as old DNSException - return super().__str__() - - -class FormError(DNSException): - """DNS message is malformed.""" - - -class SyntaxError(DNSException): - """Text input is malformed.""" - - -class UnexpectedEnd(SyntaxError): - """Text input ended unexpectedly.""" - - -class TooBig(DNSException): - """The DNS message is too big.""" - - -class Timeout(DNSException): - """The DNS operation timed out.""" - - supp_kwargs = {"timeout"} - fmt = "The DNS operation timed out after {timeout:.3f} seconds" - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - -class UnsupportedAlgorithm(DNSException): - """The DNSSEC algorithm is not supported.""" - - -class AlgorithmKeyMismatch(UnsupportedAlgorithm): - """The DNSSEC algorithm is not supported for the given key type.""" - - -class ValidationFailure(DNSException): - """The DNSSEC signature is invalid.""" - - -class DeniedByPolicy(DNSException): - """Denied by DNSSEC policy.""" - - -class ExceptionWrapper: - def __init__(self, exception_class): - self.exception_class = exception_class - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None and not isinstance(exc_val, self.exception_class): - raise self.exception_class(str(exc_val)) from exc_val - return False diff --git a/venv/lib/python3.12/site-packages/dns/flags.py b/venv/lib/python3.12/site-packages/dns/flags.py deleted file mode 100644 index 4c60be13..00000000 --- a/venv/lib/python3.12/site-packages/dns/flags.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Message Flags.""" - -import enum -from typing import Any - -# Standard DNS flags - - -class Flag(enum.IntFlag): - #: Query Response - QR = 0x8000 - #: Authoritative Answer - AA = 0x0400 - #: Truncated Response - TC = 0x0200 - #: Recursion Desired - RD = 0x0100 - #: Recursion Available - RA = 0x0080 - #: Authentic Data - AD = 0x0020 - #: Checking Disabled - CD = 0x0010 - - -# EDNS flags - - -class EDNSFlag(enum.IntFlag): - #: DNSSEC answer OK - DO = 0x8000 - - -def _from_text(text: str, enum_class: Any) -> int: - flags = 0 - tokens = text.split() - for t in tokens: - flags |= enum_class[t.upper()] - return flags - - -def _to_text(flags: int, enum_class: Any) -> str: - text_flags = [] - for k, v in enum_class.__members__.items(): - if flags & v != 0: - text_flags.append(k) - return " ".join(text_flags) - - -def from_text(text: str) -> int: - """Convert a space-separated list of flag text values into a flags - value. - - Returns an ``int`` - """ - - return _from_text(text, Flag) - - -def to_text(flags: int) -> str: - """Convert a flags value into a space-separated list of flag text - values. - - Returns a ``str``. - """ - - return _to_text(flags, Flag) - - -def edns_from_text(text: str) -> int: - """Convert a space-separated list of EDNS flag text values into a EDNS - flags value. - - Returns an ``int`` - """ - - return _from_text(text, EDNSFlag) - - -def edns_to_text(flags: int) -> str: - """Convert an EDNS flags value into a space-separated list of EDNS flag - text values. - - Returns a ``str``. - """ - - return _to_text(flags, EDNSFlag) - - -### BEGIN generated Flag constants - -QR = Flag.QR -AA = Flag.AA -TC = Flag.TC -RD = Flag.RD -RA = Flag.RA -AD = Flag.AD -CD = Flag.CD - -### END generated Flag constants - -### BEGIN generated EDNSFlag constants - -DO = EDNSFlag.DO - -### END generated EDNSFlag constants diff --git a/venv/lib/python3.12/site-packages/dns/grange.py b/venv/lib/python3.12/site-packages/dns/grange.py deleted file mode 100644 index a967ca41..00000000 --- a/venv/lib/python3.12/site-packages/dns/grange.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2012-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS GENERATE range conversion.""" - -from typing import Tuple - -import dns - - -def from_text(text: str) -> Tuple[int, int, int]: - """Convert the text form of a range in a ``$GENERATE`` statement to an - integer. - - *text*, a ``str``, the textual range in ``$GENERATE`` form. - - Returns a tuple of three ``int`` values ``(start, stop, step)``. 
- """ - - start = -1 - stop = -1 - step = 1 - cur = "" - state = 0 - # state 0 1 2 - # x - y / z - - if text and text[0] == "-": - raise dns.exception.SyntaxError("Start cannot be a negative number") - - for c in text: - if c == "-" and state == 0: - start = int(cur) - cur = "" - state = 1 - elif c == "/": - stop = int(cur) - cur = "" - state = 2 - elif c.isdigit(): - cur += c - else: - raise dns.exception.SyntaxError(f"Could not parse {c}") - - if state == 0: - raise dns.exception.SyntaxError("no stop value specified") - elif state == 1: - stop = int(cur) - else: - assert state == 2 - step = int(cur) - - assert step >= 1 - assert start >= 0 - if start > stop: - raise dns.exception.SyntaxError("start must be <= stop") - - return (start, stop, step) diff --git a/venv/lib/python3.12/site-packages/dns/immutable.py b/venv/lib/python3.12/site-packages/dns/immutable.py deleted file mode 100644 index 36b0362c..00000000 --- a/venv/lib/python3.12/site-packages/dns/immutable.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import collections.abc -from typing import Any, Callable - -from dns._immutable_ctx import immutable - - -@immutable -class Dict(collections.abc.Mapping): # lgtm[py/missing-equals] - def __init__( - self, - dictionary: Any, - no_copy: bool = False, - map_factory: Callable[[], collections.abc.MutableMapping] = dict, - ): - """Make an immutable dictionary from the specified dictionary. - - If *no_copy* is `True`, then *dictionary* will be wrapped instead - of copied. Only set this if you are sure there will be no external - references to the dictionary. - """ - if no_copy and isinstance(dictionary, collections.abc.MutableMapping): - self._odict = dictionary - else: - self._odict = map_factory() - self._odict.update(dictionary) - self._hash = None - - def __getitem__(self, key): - return self._odict.__getitem__(key) - - def __hash__(self): # pylint: disable=invalid-hash-returned - if self._hash is None: - h = 0 - for key in sorted(self._odict.keys()): - h ^= hash(key) - object.__setattr__(self, "_hash", h) - # this does return an int, but pylint doesn't figure that out - return self._hash - - def __len__(self): - return len(self._odict) - - def __iter__(self): - return iter(self._odict) - - -def constify(o: Any) -> Any: - """ - Convert mutable types to immutable types. - """ - if isinstance(o, bytearray): - return bytes(o) - if isinstance(o, tuple): - try: - hash(o) - return o - except Exception: - return tuple(constify(elt) for elt in o) - if isinstance(o, list): - return tuple(constify(elt) for elt in o) - if isinstance(o, dict): - cdict = dict() - for k, v in o.items(): - cdict[k] = constify(v) - return Dict(cdict, True) - return o diff --git a/venv/lib/python3.12/site-packages/dns/inet.py b/venv/lib/python3.12/site-packages/dns/inet.py deleted file mode 100644 index 4a03f996..00000000 --- a/venv/lib/python3.12/site-packages/dns/inet.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Generic Internet address helper functions.""" - -import socket -from typing import Any, Optional, Tuple - -import dns.ipv4 -import dns.ipv6 - -# We assume that AF_INET and AF_INET6 are always defined. We keep -# these here for the benefit of any old code (unlikely though that -# is!). -AF_INET = socket.AF_INET -AF_INET6 = socket.AF_INET6 - - -def inet_pton(family: int, text: str) -> bytes: - """Convert the textual form of a network address into its binary form. - - *family* is an ``int``, the address family. - - *text* is a ``str``, the textual address. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``bytes``. - """ - - if family == AF_INET: - return dns.ipv4.inet_aton(text) - elif family == AF_INET6: - return dns.ipv6.inet_aton(text, True) - else: - raise NotImplementedError - - -def inet_ntop(family: int, address: bytes) -> str: - """Convert the binary form of a network address into its textual form. - - *family* is an ``int``, the address family. - - *address* is a ``bytes``, the network address in binary form. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``str``. - """ - - if family == AF_INET: - return dns.ipv4.inet_ntoa(address) - elif family == AF_INET6: - return dns.ipv6.inet_ntoa(address) - else: - raise NotImplementedError - - -def af_for_address(text: str) -> int: - """Determine the address family of a textual-form network address. - - *text*, a ``str``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns an ``int``. - """ - - try: - dns.ipv4.inet_aton(text) - return AF_INET - except Exception: - try: - dns.ipv6.inet_aton(text, True) - return AF_INET6 - except Exception: - raise ValueError - - -def is_multicast(text: str) -> bool: - """Is the textual-form network address a multicast address? - - *text*, a ``str``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns a ``bool``. - """ - - try: - first = dns.ipv4.inet_aton(text)[0] - return first >= 224 and first <= 239 - except Exception: - try: - first = dns.ipv6.inet_aton(text, True)[0] - return first == 255 - except Exception: - raise ValueError - - -def is_address(text: str) -> bool: - """Is the specified string an IPv4 or IPv6 address? - - *text*, a ``str``, the textual address. - - Returns a ``bool``. - """ - - try: - dns.ipv4.inet_aton(text) - return True - except Exception: - try: - dns.ipv6.inet_aton(text, True) - return True - except Exception: - return False - - -def low_level_address_tuple( - high_tuple: Tuple[str, int], af: Optional[int] = None -) -> Any: - """Given a "high-level" address tuple, i.e. - an (address, port) return the appropriate "low-level" address tuple - suitable for use in socket calls. - - If an *af* other than ``None`` is provided, it is assumed the - address in the high-level tuple is valid and has that af. If af - is ``None``, then af_for_address will be called. 
- """ - address, port = high_tuple - if af is None: - af = af_for_address(address) - if af == AF_INET: - return (address, port) - elif af == AF_INET6: - i = address.find("%") - if i < 0: - # no scope, shortcut! - return (address, port, 0, 0) - # try to avoid getaddrinfo() - addrpart = address[:i] - scope = address[i + 1 :] - if scope.isdigit(): - return (addrpart, port, 0, int(scope)) - try: - return (addrpart, port, 0, socket.if_nametoindex(scope)) - except AttributeError: # pragma: no cover (we can't really test this) - ai_flags = socket.AI_NUMERICHOST - ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) - return tup - else: - raise NotImplementedError(f"unknown address family {af}") - - -def any_for_af(af): - """Return the 'any' address for the specified address family.""" - if af == socket.AF_INET: - return "0.0.0.0" - elif af == socket.AF_INET6: - return "::" - raise NotImplementedError(f"unknown address family {af}") - - -def canonicalize(text: str) -> str: - """Verify that *address* is a valid text form IPv4 or IPv6 address and return its - canonical text form. IPv6 addresses with scopes are rejected. - - *text*, a ``str``, the address in textual form. - - Raises ``ValueError`` if the text is not valid. - """ - try: - return dns.ipv6.canonicalize(text) - except Exception: - try: - return dns.ipv4.canonicalize(text) - except Exception: - raise ValueError diff --git a/venv/lib/python3.12/site-packages/dns/ipv4.py b/venv/lib/python3.12/site-packages/dns/ipv4.py deleted file mode 100644 index 65ee69c0..00000000 --- a/venv/lib/python3.12/site-packages/dns/ipv4.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv4 helper functions.""" - -import struct -from typing import Union - -import dns.exception - - -def inet_ntoa(address: bytes) -> str: - """Convert an IPv4 address in binary form to text form. - - *address*, a ``bytes``, the IPv4 address in binary form. - - Returns a ``str``. - """ - - if len(address) != 4: - raise dns.exception.SyntaxError - return "%u.%u.%u.%u" % (address[0], address[1], address[2], address[3]) - - -def inet_aton(text: Union[str, bytes]) -> bytes: - """Convert an IPv4 address in text form to binary form. - - *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. - - Returns a ``bytes``. 
- """ - - if not isinstance(text, bytes): - btext = text.encode() - else: - btext = text - parts = btext.split(b".") - if len(parts) != 4: - raise dns.exception.SyntaxError - for part in parts: - if not part.isdigit(): - raise dns.exception.SyntaxError - if len(part) > 1 and part[0] == ord("0"): - # No leading zeros - raise dns.exception.SyntaxError - try: - b = [int(part) for part in parts] - return struct.pack("BBBB", *b) - except Exception: - raise dns.exception.SyntaxError - - -def canonicalize(text: Union[str, bytes]) -> str: - """Verify that *address* is a valid text form IPv4 address and return its - canonical text form. - - *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. - - Raises ``dns.exception.SyntaxError`` if the text is not valid. - """ - # Note that inet_aton() only accepts canonial form, but we still run through - # inet_ntoa() to ensure the output is a str. - return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) diff --git a/venv/lib/python3.12/site-packages/dns/ipv6.py b/venv/lib/python3.12/site-packages/dns/ipv6.py deleted file mode 100644 index 4dd1d1ca..00000000 --- a/venv/lib/python3.12/site-packages/dns/ipv6.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv6 helper functions.""" - -import binascii -import re -from typing import List, Union - -import dns.exception -import dns.ipv4 - -_leading_zero = re.compile(r"0+([0-9a-f]+)") - - -def inet_ntoa(address: bytes) -> str: - """Convert an IPv6 address in binary form to text form. - - *address*, a ``bytes``, the IPv6 address in binary form. - - Raises ``ValueError`` if the address isn't 16 bytes long. - Returns a ``str``. - """ - - if len(address) != 16: - raise ValueError("IPv6 addresses are 16 bytes long") - hex = binascii.hexlify(address) - chunks = [] - i = 0 - l = len(hex) - while i < l: - chunk = hex[i : i + 4].decode() - # strip leading zeros. 
we do this with an re instead of - # with lstrip() because lstrip() didn't support chars until - # python 2.2.2 - m = _leading_zero.match(chunk) - if m is not None: - chunk = m.group(1) - chunks.append(chunk) - i += 4 - # - # Compress the longest subsequence of 0-value chunks to :: - # - best_start = 0 - best_len = 0 - start = -1 - last_was_zero = False - for i in range(8): - if chunks[i] != "0": - if last_was_zero: - end = i - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - last_was_zero = False - elif not last_was_zero: - start = i - last_was_zero = True - if last_was_zero: - end = 8 - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - if best_len > 1: - if best_start == 0 and (best_len == 6 or best_len == 5 and chunks[5] == "ffff"): - # We have an embedded IPv4 address - if best_len == 6: - prefix = "::" - else: - prefix = "::ffff:" - thex = prefix + dns.ipv4.inet_ntoa(address[12:]) - else: - thex = ( - ":".join(chunks[:best_start]) - + "::" - + ":".join(chunks[best_start + best_len :]) - ) - else: - thex = ":".join(chunks) - return thex - - -_v4_ending = re.compile(rb"(.*):(\d+\.\d+\.\d+\.\d+)$") -_colon_colon_start = re.compile(rb"::.*") -_colon_colon_end = re.compile(rb".*::$") - - -def inet_aton(text: Union[str, bytes], ignore_scope: bool = False) -> bytes: - """Convert an IPv6 address in text form to binary form. - - *text*, a ``str`` or ``bytes``, the IPv6 address in textual form. - - *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored. - If ``False``, the default, it is an error for a scope to be present. - - Returns a ``bytes``. - """ - - # - # Our aim here is not something fast; we just want something that works. - # - if not isinstance(text, bytes): - btext = text.encode() - else: - btext = text - - if ignore_scope: - parts = btext.split(b"%") - l = len(parts) - if l == 2: - btext = parts[0] - elif l > 2: - raise dns.exception.SyntaxError - - if btext == b"": - raise dns.exception.SyntaxError - elif btext.endswith(b":") and not btext.endswith(b"::"): - raise dns.exception.SyntaxError - elif btext.startswith(b":") and not btext.startswith(b"::"): - raise dns.exception.SyntaxError - elif btext == b"::": - btext = b"0::" - # - # Get rid of the icky dot-quad syntax if we have it. - # - m = _v4_ending.match(btext) - if m is not None: - b = dns.ipv4.inet_aton(m.group(2)) - btext = ( - f"{m.group(1).decode()}:{b[0]:02x}{b[1]:02x}:{b[2]:02x}{b[3]:02x}" - ).encode() - # - # Try to turn '::' into ':'; if no match try to - # turn '::' into ':' - # - m = _colon_colon_start.match(btext) - if m is not None: - btext = btext[1:] - else: - m = _colon_colon_end.match(btext) - if m is not None: - btext = btext[:-1] - # - # Now canonicalize into 8 chunks of 4 hex digits each - # - chunks = btext.split(b":") - l = len(chunks) - if l > 8: - raise dns.exception.SyntaxError - seen_empty = False - canonical: List[bytes] = [] - for c in chunks: - if c == b"": - if seen_empty: - raise dns.exception.SyntaxError - seen_empty = True - for _ in range(0, 8 - l + 1): - canonical.append(b"0000") - else: - lc = len(c) - if lc > 4: - raise dns.exception.SyntaxError - if lc != 4: - c = (b"0" * (4 - lc)) + c - canonical.append(c) - if l < 8 and not seen_empty: - raise dns.exception.SyntaxError - btext = b"".join(canonical) - - # - # Finally we can go to binary. 
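For reference, a small sketch (not part of the diff) of the observable behaviour of these dns.ipv6 helpers, including is_mapped() and canonicalize() which follow just below; it assumes dnspython installed from PyPI:

    import dns.ipv6

    # The longest run of zero chunks is compressed to "::".
    assert dns.ipv6.inet_ntoa(b"\x00" * 15 + b"\x01") == "::1"

    # A ::ffff:0:0/96 prefix is rendered as an embedded IPv4 address.
    mapped = dns.ipv6.inet_aton("::ffff:192.0.2.1")
    assert dns.ipv6.is_mapped(mapped)
    assert dns.ipv6.inet_ntoa(mapped) == "::ffff:192.0.2.1"

    # canonicalize() round-trips text through binary form.
    assert dns.ipv6.canonicalize("2001:0DB8:0:0:0:0:0:1") == "2001:db8::1"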
- # - try: - return binascii.unhexlify(btext) - except (binascii.Error, TypeError): - raise dns.exception.SyntaxError - - -_mapped_prefix = b"\x00" * 10 + b"\xff\xff" - - -def is_mapped(address: bytes) -> bool: - """Is the specified address a mapped IPv4 address? - - *address*, a ``bytes`` is an IPv6 address in binary form. - - Returns a ``bool``. - """ - - return address.startswith(_mapped_prefix) - - -def canonicalize(text: Union[str, bytes]) -> str: - """Verify that *address* is a valid text form IPv6 address and return its - canonical text form. Addresses with scopes are rejected. - - *text*, a ``str`` or ``bytes``, the IPv6 address in textual form. - - Raises ``dns.exception.SyntaxError`` if the text is not valid. - """ - return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) diff --git a/venv/lib/python3.12/site-packages/dns/message.py b/venv/lib/python3.12/site-packages/dns/message.py deleted file mode 100644 index e978a0a2..00000000 --- a/venv/lib/python3.12/site-packages/dns/message.py +++ /dev/null @@ -1,1933 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Messages""" - -import contextlib -import enum -import io -import time -from typing import Any, Dict, List, Optional, Tuple, Union, cast - -import dns.edns -import dns.entropy -import dns.enum -import dns.exception -import dns.flags -import dns.name -import dns.opcode -import dns.rcode -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.ANY.OPT -import dns.rdtypes.ANY.TSIG -import dns.renderer -import dns.rrset -import dns.tsig -import dns.ttl -import dns.wire - - -class ShortHeader(dns.exception.FormError): - """The DNS packet passed to from_wire() is too short.""" - - -class TrailingJunk(dns.exception.FormError): - """The DNS packet passed to from_wire() has extra junk at the end of it.""" - - -class UnknownHeaderField(dns.exception.DNSException): - """The header field name was not recognized when converting from text - into a message.""" - - -class BadEDNS(dns.exception.FormError): - """An OPT record occurred somewhere other than - the additional data section.""" - - -class BadTSIG(dns.exception.FormError): - """A TSIG record occurred somewhere other than the end of - the additional data section.""" - - -class UnknownTSIGKey(dns.exception.DNSException): - """A TSIG with an unknown key was received.""" - - -class Truncated(dns.exception.DNSException): - """The truncated flag is set.""" - - supp_kwargs = {"message"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def message(self): - """As much of the message as could be processed. 
-
-        Returns a ``dns.message.Message``.
-        """
-        return self.kwargs["message"]
-
-
-class NotQueryResponse(dns.exception.DNSException):
-    """Message is not a response to a query."""
-
-
-class ChainTooLong(dns.exception.DNSException):
-    """The CNAME chain is too long."""
-
-
-class AnswerForNXDOMAIN(dns.exception.DNSException):
-    """The rcode is NXDOMAIN but an answer was found."""
-
-
-class NoPreviousName(dns.exception.SyntaxError):
-    """No previous name was known."""
-
-
-class MessageSection(dns.enum.IntEnum):
-    """Message sections"""
-
-    QUESTION = 0
-    ANSWER = 1
-    AUTHORITY = 2
-    ADDITIONAL = 3
-
-    @classmethod
-    def _maximum(cls):
-        return 3
-
-
-class MessageError:
-    def __init__(self, exception: Exception, offset: int):
-        self.exception = exception
-        self.offset = offset
-
-
-DEFAULT_EDNS_PAYLOAD = 1232
-MAX_CHAIN = 16
-
-IndexKeyType = Tuple[
-    int,
-    dns.name.Name,
-    dns.rdataclass.RdataClass,
-    dns.rdatatype.RdataType,
-    Optional[dns.rdatatype.RdataType],
-    Optional[dns.rdataclass.RdataClass],
-]
-IndexType = Dict[IndexKeyType, dns.rrset.RRset]
-SectionType = Union[int, str, List[dns.rrset.RRset]]
-
-
-class Message:
-    """A DNS message."""
-
-    _section_enum = MessageSection
-
-    def __init__(self, id: Optional[int] = None):
-        if id is None:
-            self.id = dns.entropy.random_16()
-        else:
-            self.id = id
-        self.flags = 0
-        self.sections: List[List[dns.rrset.RRset]] = [[], [], [], []]
-        self.opt: Optional[dns.rrset.RRset] = None
-        self.request_payload = 0
-        self.pad = 0
-        self.keyring: Any = None
-        self.tsig: Optional[dns.rrset.RRset] = None
-        self.request_mac = b""
-        self.xfr = False
-        self.origin: Optional[dns.name.Name] = None
-        self.tsig_ctx: Optional[Any] = None
-        self.index: IndexType = {}
-        self.errors: List[MessageError] = []
-        self.time = 0.0
-        self.wire: Optional[bytes] = None
-
-    @property
-    def question(self) -> List[dns.rrset.RRset]:
-        """The question section."""
-        return self.sections[0]
-
-    @question.setter
-    def question(self, v):
-        self.sections[0] = v
-
-    @property
-    def answer(self) -> List[dns.rrset.RRset]:
-        """The answer section."""
-        return self.sections[1]
-
-    @answer.setter
-    def answer(self, v):
-        self.sections[1] = v
-
-    @property
-    def authority(self) -> List[dns.rrset.RRset]:
-        """The authority section."""
-        return self.sections[2]
-
-    @authority.setter
-    def authority(self, v):
-        self.sections[2] = v
-
-    @property
-    def additional(self) -> List[dns.rrset.RRset]:
-        """The additional data section."""
-        return self.sections[3]
-
-    @additional.setter
-    def additional(self, v):
-        self.sections[3] = v
-
-    def __repr__(self):
-        return "<DNS message, ID " + repr(self.id) + ">"
-
-    def __str__(self):
-        return self.to_text()
-
-    def to_text(
-        self,
-        origin: Optional[dns.name.Name] = None,
-        relativize: bool = True,
-        **kw: Dict[str, Any],
-    ) -> str:
-        """Convert the message to text.
-
-        The *origin*, *relativize*, and any other keyword
-        arguments are passed to the RRset ``to_text()`` method.
-
-        Returns a ``str``.
- """ - - s = io.StringIO() - s.write("id %d\n" % self.id) - s.write(f"opcode {dns.opcode.to_text(self.opcode())}\n") - s.write(f"rcode {dns.rcode.to_text(self.rcode())}\n") - s.write(f"flags {dns.flags.to_text(self.flags)}\n") - if self.edns >= 0: - s.write(f"edns {self.edns}\n") - if self.ednsflags != 0: - s.write(f"eflags {dns.flags.edns_to_text(self.ednsflags)}\n") - s.write("payload %d\n" % self.payload) - for opt in self.options: - s.write(f"option {opt.to_text()}\n") - for name, which in self._section_enum.__members__.items(): - s.write(f";{name}\n") - for rrset in self.section_from_number(which): - s.write(rrset.to_text(origin, relativize, **kw)) - s.write("\n") - # - # We strip off the final \n so the caller can print the result without - # doing weird things to get around eccentricities in Python print - # formatting - # - return s.getvalue()[:-1] - - def __eq__(self, other): - """Two messages are equal if they have the same content in the - header, question, answer, and authority sections. - - Returns a ``bool``. - """ - - if not isinstance(other, Message): - return False - if self.id != other.id: - return False - if self.flags != other.flags: - return False - for i, section in enumerate(self.sections): - other_section = other.sections[i] - for n in section: - if n not in other_section: - return False - for n in other_section: - if n not in section: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def is_response(self, other: "Message") -> bool: - """Is *other*, also a ``dns.message.Message``, a response to this - message? - - Returns a ``bool``. - """ - - if ( - other.flags & dns.flags.QR == 0 - or self.id != other.id - or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags) - ): - return False - if other.rcode() in { - dns.rcode.FORMERR, - dns.rcode.SERVFAIL, - dns.rcode.NOTIMP, - dns.rcode.REFUSED, - }: - # We don't check the question section in these cases if - # the other question section is empty, even though they - # still really ought to have a question section. - if len(other.question) == 0: - return True - if dns.opcode.is_update(self.flags): - # This is assuming the "sender doesn't include anything - # from the update", but we don't care to check the other - # case, which is that all the sections are returned and - # identical. - return True - for n in self.question: - if n not in other.question: - return False - for n in other.question: - if n not in self.question: - return False - return True - - def section_number(self, section: List[dns.rrset.RRset]) -> int: - """Return the "section number" of the specified section for use - in indexing. - - *section* is one of the section attributes of this message. - - Raises ``ValueError`` if the section isn't known. - - Returns an ``int``. - """ - - for i, our_section in enumerate(self.sections): - if section is our_section: - return self._section_enum(i) - raise ValueError("unknown section") - - def section_from_number(self, number: int) -> List[dns.rrset.RRset]: - """Return the section list associated with the specified section - number. - - *number* is a section number `int` or the text form of a section - name. - - Raises ``ValueError`` if the section isn't known. - - Returns a ``list``. 
- """ - - section = self._section_enum.make(number) - return self.sections[section] - - def find_rrset( - self, - section: SectionType, - name: dns.name.Name, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - deleting: Optional[dns.rdataclass.RdataClass] = None, - create: bool = False, - force_unique: bool = False, - idna_codec: Optional[dns.name.IDNACodec] = None, - ) -> dns.rrset.RRset: - """Find the RRset with the given attributes in the specified section. - - *section*, an ``int`` section number, a ``str`` section name, or one of - the section attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.find_rrset(my_message.answer, name, rdclass, rdtype) - my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) - my_message.find_rrset("ANSWER", name, rdclass, rdtype) - - *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. - - *rdclass*, an ``int`` or ``str``, the class of the RRset. - - *rdtype*, an ``int`` or ``str``, the type of the RRset. - - *covers*, an ``int`` or ``str``, the covers value of the RRset. - The default is ``dns.rdatatype.NONE``. - - *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the - RRset. The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Raises ``KeyError`` if the RRset was not found and create was - ``False``. - - Returns a ``dns.rrset.RRset object``. 
- """ - - if isinstance(section, int): - section_number = section - section = self.section_from_number(section_number) - elif isinstance(section, str): - section_number = self._section_enum.from_text(section) - section = self.section_from_number(section_number) - else: - section_number = self.section_number(section) - if isinstance(name, str): - name = dns.name.from_text(name, idna_codec=idna_codec) - rdtype = dns.rdatatype.RdataType.make(rdtype) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - covers = dns.rdatatype.RdataType.make(covers) - if deleting is not None: - deleting = dns.rdataclass.RdataClass.make(deleting) - key = (section_number, name, rdclass, rdtype, covers, deleting) - if not force_unique: - if self.index is not None: - rrset = self.index.get(key) - if rrset is not None: - return rrset - else: - for rrset in section: - if rrset.full_match(name, rdclass, rdtype, covers, deleting): - return rrset - if not create: - raise KeyError - rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) - section.append(rrset) - if self.index is not None: - self.index[key] = rrset - return rrset - - def get_rrset( - self, - section: SectionType, - name: dns.name.Name, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - deleting: Optional[dns.rdataclass.RdataClass] = None, - create: bool = False, - force_unique: bool = False, - idna_codec: Optional[dns.name.IDNACodec] = None, - ) -> Optional[dns.rrset.RRset]: - """Get the RRset with the given attributes in the specified section. - - If the RRset is not found, None is returned. - - *section*, an ``int`` section number, a ``str`` section name, or one of - the section attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.get_rrset(my_message.answer, name, rdclass, rdtype) - my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) - my_message.get_rrset("ANSWER", name, rdclass, rdtype) - - *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. - - *rdclass*, an ``int`` or ``str``, the class of the RRset. - - *rdtype*, an ``int`` or ``str``, the type of the RRset. - - *covers*, an ``int`` or ``str``, the covers value of the RRset. - The default is ``dns.rdatatype.NONE``. - - *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the - RRset. The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.rrset.RRset object`` or ``None``. - """ - - try: - rrset = self.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - create, - force_unique, - idna_codec, - ) - except KeyError: - rrset = None - return rrset - - def section_count(self, section: SectionType) -> int: - """Returns the number of records in the specified section. - - *section*, an ``int`` section number, a ``str`` section name, or one of - the section attributes of this message. This specifies the - the section of the message to count. 
For example:: - - my_message.section_count(my_message.answer) - my_message.section_count(dns.message.ANSWER) - my_message.section_count("ANSWER") - """ - - if isinstance(section, int): - section_number = section - section = self.section_from_number(section_number) - elif isinstance(section, str): - section_number = self._section_enum.from_text(section) - section = self.section_from_number(section_number) - else: - section_number = self.section_number(section) - count = sum(max(1, len(rrs)) for rrs in section) - if section_number == MessageSection.ADDITIONAL: - if self.opt is not None: - count += 1 - if self.tsig is not None: - count += 1 - return count - - def _compute_opt_reserve(self) -> int: - """Compute the size required for the OPT RR, padding excluded""" - if not self.opt: - return 0 - # 1 byte for the root name, 10 for the standard RR fields - size = 11 - # This would be more efficient if options had a size() method, but we won't - # worry about that for now. We also don't worry if there is an existing padding - # option, as it is unlikely and probably harmless, as the worst case is that we - # may add another, and this seems to be legal. - for option in self.opt[0].options: - wire = option.to_wire() - # We add 4 here to account for the option type and length - size += len(wire) + 4 - if self.pad: - # Padding will be added, so again add the option type and length. - size += 4 - return size - - def _compute_tsig_reserve(self) -> int: - """Compute the size required for the TSIG RR""" - # This would be more efficient if TSIGs had a size method, but we won't - # worry about for now. Also, we can't really cope with the potential - # compressibility of the TSIG owner name, so we estimate with the uncompressed - # size. We will disable compression when TSIG and padding are both is active - # so that the padding comes out right. - if not self.tsig: - return 0 - f = io.BytesIO() - self.tsig.to_wire(f) - return len(f.getvalue()) - - def to_wire( - self, - origin: Optional[dns.name.Name] = None, - max_size: int = 0, - multi: bool = False, - tsig_ctx: Optional[Any] = None, - prepend_length: bool = False, - prefer_truncation: bool = False, - **kw: Dict[str, Any], - ) -> bytes: - """Return a string containing the message in DNS compressed wire - format. - - Additional keyword arguments are passed to the RRset ``to_wire()`` - method. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended - to any relative names. If ``None``, and the message has an origin - attribute that is not ``None``, then it will be used. - - *max_size*, an ``int``, the maximum size of the wire format - output; default is 0, which means "the message's request - payload, if nonzero, or 65535". - - *multi*, a ``bool``, should be set to ``True`` if this message is - part of a multiple message sequence. - - *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the - ongoing TSIG context, used when signing zone transfers. - - *prepend_length*, a ``bool``, should be set to ``True`` if the caller - wants the message length prepended to the message itself. This is - useful for messages sent over TCP, TLS (DoT), or QUIC (DoQ). - - *prefer_truncation*, a ``bool``, should be set to ``True`` if the caller - wants the message to be truncated if it would otherwise exceed the - maximum length. If the truncation occurs before the additional section, - the TC bit will be set. - - Raises ``dns.exception.TooBig`` if *max_size* was exceeded. - - Returns a ``bytes``. 
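The find_rrset()/get_rrset()/section_count() trio above differs mainly in miss behaviour (KeyError versus ``None``) and in counting RRs rather than RRsets; a sketch (not part of the diff, dnspython from PyPI assumed):

    import dns.message
    import dns.rdataclass
    import dns.rdatatype

    msg = dns.message.make_query("example.com.", "A")
    qname = msg.question[0].name

    # Sections can be addressed by attribute, constant, or name.
    rrset = msg.find_rrset(msg.question, qname, dns.rdataclass.IN, dns.rdatatype.A)
    assert msg.get_rrset("QUESTION", qname, dns.rdataclass.IN, dns.rdatatype.MX) is None
    assert msg.section_count(dns.message.QUESTION) == 1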
- """ - - if origin is None and self.origin is not None: - origin = self.origin - if max_size == 0: - if self.request_payload != 0: - max_size = self.request_payload - else: - max_size = 65535 - if max_size < 512: - max_size = 512 - elif max_size > 65535: - max_size = 65535 - r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) - opt_reserve = self._compute_opt_reserve() - r.reserve(opt_reserve) - tsig_reserve = self._compute_tsig_reserve() - r.reserve(tsig_reserve) - try: - for rrset in self.question: - r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) - for rrset in self.answer: - r.add_rrset(dns.renderer.ANSWER, rrset, **kw) - for rrset in self.authority: - r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) - for rrset in self.additional: - r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) - except dns.exception.TooBig: - if prefer_truncation: - if r.section < dns.renderer.ADDITIONAL: - r.flags |= dns.flags.TC - else: - raise - r.release_reserved() - if self.opt is not None: - r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve) - r.write_header() - if self.tsig is not None: - (new_tsig, ctx) = dns.tsig.sign( - r.get_wire(), - self.keyring, - self.tsig[0], - int(time.time()), - self.request_mac, - tsig_ctx, - multi, - ) - self.tsig.clear() - self.tsig.add(new_tsig) - r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) - r.write_header() - if multi: - self.tsig_ctx = ctx - wire = r.get_wire() - self.wire = wire - if prepend_length: - wire = len(wire).to_bytes(2, "big") + wire - return wire - - @staticmethod - def _make_tsig( - keyname, algorithm, time_signed, fudge, mac, original_id, error, other - ): - tsig = dns.rdtypes.ANY.TSIG.TSIG( - dns.rdataclass.ANY, - dns.rdatatype.TSIG, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) - return dns.rrset.from_rdata(keyname, 0, tsig) - - def use_tsig( - self, - keyring: Any, - keyname: Optional[Union[dns.name.Name, str]] = None, - fudge: int = 300, - original_id: Optional[int] = None, - tsig_error: int = 0, - other_data: bytes = b"", - algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - ) -> None: - """When sending, a TSIG signature using the specified key - should be added. - - *key*, a ``dns.tsig.Key`` is the key to use. If a key is specified, - the *keyring* and *algorithm* fields are not used. - - *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either - the TSIG keyring or key to use. - - The format of a keyring dict is a mapping from TSIG key name, as - ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. - If a ``dict`` *keyring* is specified but a *keyname* is not, the key - used will be the first key in the *keyring*. Note that the order of - keys in a dictionary is not defined, so applications should supply a - keyname when a ``dict`` keyring is used, unless they know the keyring - contains only one key. If a ``callable`` keyring is specified, the - callable will be called with the message and the keyname, and is - expected to return a key. - - *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of - this TSIG key to use; defaults to ``None``. If *keyring* is a - ``dict``, the key must be defined in it. If *keyring* is a - ``dns.tsig.Key``, this is ignored. - - *fudge*, an ``int``, the TSIG time fudge. - - *original_id*, an ``int``, the TSIG original id. If ``None``, - the message's id is used. - - *tsig_error*, an ``int``, the TSIG error code. - - *other_data*, a ``bytes``, the TSIG other data. 
- - *algorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. This is - only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. - """ - - if isinstance(keyring, dns.tsig.Key): - key = keyring - keyname = key.name - elif callable(keyring): - key = keyring(self, keyname) - else: - if isinstance(keyname, str): - keyname = dns.name.from_text(keyname) - if keyname is None: - keyname = next(iter(keyring)) - key = keyring[keyname] - if isinstance(key, bytes): - key = dns.tsig.Key(keyname, key, algorithm) - self.keyring = key - if original_id is None: - original_id = self.id - self.tsig = self._make_tsig( - keyname, - self.keyring.algorithm, - 0, - fudge, - b"\x00" * dns.tsig.mac_sizes[self.keyring.algorithm], - original_id, - tsig_error, - other_data, - ) - - @property - def keyname(self) -> Optional[dns.name.Name]: - if self.tsig: - return self.tsig.name - else: - return None - - @property - def keyalgorithm(self) -> Optional[dns.name.Name]: - if self.tsig: - return self.tsig[0].algorithm - else: - return None - - @property - def mac(self) -> Optional[bytes]: - if self.tsig: - return self.tsig[0].mac - else: - return None - - @property - def tsig_error(self) -> Optional[int]: - if self.tsig: - return self.tsig[0].error - else: - return None - - @property - def had_tsig(self) -> bool: - return bool(self.tsig) - - @staticmethod - def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): - opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, options or ()) - return dns.rrset.from_rdata(dns.name.root, int(flags), opt) - - def use_edns( - self, - edns: Optional[Union[int, bool]] = 0, - ednsflags: int = 0, - payload: int = DEFAULT_EDNS_PAYLOAD, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - pad: int = 0, - ) -> None: - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying ``None``, ``False``, - or ``-1`` means "do not use EDNS", and in this case the other parameters are - ignored. Specifying ``True`` is equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the maximum - size of UDP datagram the sender can handle. I.e. how big a response to this - message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when sending this - message. If not specified, defaults to the value of *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS options. - - *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add - padding bytes to make the message size a multiple of *pad*. Note that if - padding is non-zero, an EDNS PADDING option will always be added to the - message. 
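use_tsig() accepts several keyring shapes; the simplest is a dict mapping a key name to a secret. A sketch (not part of the diff), where the key name and secret are made up for illustration and dnspython is assumed from PyPI:

    import dns.message
    import dns.name
    import dns.tsig

    # Hypothetical key: the name and secret here are illustrative only.
    keyring = {dns.name.from_text("transfer-key."): b"sixteen byte key"}

    msg = dns.message.make_query("example.com.", "SOA")
    msg.use_tsig(keyring, keyname="transfer-key.")
    wire = msg.to_wire()      # the actual signing happens at render time

    assert msg.had_tsig
    assert msg.keyname == dns.name.from_text("transfer-key.")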
- """ - - if edns is None or edns is False: - edns = -1 - elif edns is True: - edns = 0 - if edns < 0: - self.opt = None - self.request_payload = 0 - else: - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= 0xFF00FFFF - ednsflags |= edns << 16 - if options is None: - options = [] - self.opt = self._make_opt(ednsflags, payload, options) - if request_payload is None: - request_payload = payload - self.request_payload = request_payload - if pad < 0: - raise ValueError("pad must be non-negative") - self.pad = pad - - @property - def edns(self) -> int: - if self.opt: - return (self.ednsflags & 0xFF0000) >> 16 - else: - return -1 - - @property - def ednsflags(self) -> int: - if self.opt: - return self.opt.ttl - else: - return 0 - - @ednsflags.setter - def ednsflags(self, v): - if self.opt: - self.opt.ttl = v - elif v: - self.opt = self._make_opt(v) - - @property - def payload(self) -> int: - if self.opt: - return self.opt[0].payload - else: - return 0 - - @property - def options(self) -> Tuple: - if self.opt: - return self.opt[0].options - else: - return () - - def want_dnssec(self, wanted: bool = True) -> None: - """Enable or disable 'DNSSEC desired' flag in requests. - - *wanted*, a ``bool``. If ``True``, then DNSSEC data is - desired in the response, EDNS is enabled if required, and then - the DO bit is set. If ``False``, the DO bit is cleared if - EDNS is enabled. - """ - - if wanted: - self.ednsflags |= dns.flags.DO - elif self.opt: - self.ednsflags &= ~int(dns.flags.DO) - - def rcode(self) -> dns.rcode.Rcode: - """Return the rcode. - - Returns a ``dns.rcode.Rcode``. - """ - return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) - - def set_rcode(self, rcode: dns.rcode.Rcode) -> None: - """Set the rcode. - - *rcode*, a ``dns.rcode.Rcode``, is the rcode to set. - """ - (value, evalue) = dns.rcode.to_flags(rcode) - self.flags &= 0xFFF0 - self.flags |= value - self.ednsflags &= 0x00FFFFFF - self.ednsflags |= evalue - - def opcode(self) -> dns.opcode.Opcode: - """Return the opcode. - - Returns a ``dns.opcode.Opcode``. - """ - return dns.opcode.from_flags(int(self.flags)) - - def set_opcode(self, opcode: dns.opcode.Opcode) -> None: - """Set the opcode. - - *opcode*, a ``dns.opcode.Opcode``, is the opcode to set. - """ - self.flags &= 0x87FF - self.flags |= dns.opcode.to_flags(opcode) - - def get_options(self, otype: dns.edns.OptionType) -> List[dns.edns.Option]: - """Return the list of options of the specified type.""" - return [option for option in self.options if option.otype == otype] - - def extended_errors(self) -> List[dns.edns.EDEOption]: - """Return the list of Extended DNS Error (EDE) options in the message""" - return cast(List[dns.edns.EDEOption], self.get_options(dns.edns.OptionType.EDE)) - - def _get_one_rr_per_rrset(self, value): - # What the caller picked is fine. 
- return value - - # pylint: disable=unused-argument - - def _parse_rr_header(self, section, name, rdclass, rdtype): - return (rdclass, rdtype, None, False) - - # pylint: enable=unused-argument - - def _parse_special_rr_header(self, section, count, position, name, rdclass, rdtype): - if rdtype == dns.rdatatype.OPT: - if ( - section != MessageSection.ADDITIONAL - or self.opt - or name != dns.name.root - ): - raise BadEDNS - elif rdtype == dns.rdatatype.TSIG: - if ( - section != MessageSection.ADDITIONAL - or rdclass != dns.rdatatype.ANY - or position != count - 1 - ): - raise BadTSIG - return (rdclass, rdtype, None, False) - - -class ChainingResult: - """The result of a call to dns.message.QueryMessage.resolve_chaining(). - - The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't - exist. - - The ``canonical_name`` attribute is the canonical name after all - chaining has been applied (this is the same name as ``rrset.name`` in cases - where rrset is not ``None``). - - The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to - use if caching the data. It is the smallest of all the CNAME TTLs - and either the answer TTL if it exists or the SOA TTL and SOA - minimum values for negative answers. - - The ``cnames`` attribute is a list of all the CNAME RRSets followed to - get to the canonical name. - """ - - def __init__( - self, - canonical_name: dns.name.Name, - answer: Optional[dns.rrset.RRset], - minimum_ttl: int, - cnames: List[dns.rrset.RRset], - ): - self.canonical_name = canonical_name - self.answer = answer - self.minimum_ttl = minimum_ttl - self.cnames = cnames - - -class QueryMessage(Message): - def resolve_chaining(self) -> ChainingResult: - """Follow the CNAME chain in the response to determine the answer - RRset. - - Raises ``dns.message.NotQueryResponse`` if the message is not - a response. - - Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. - - Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN - but an answer was found. - - Raises ``dns.exception.FormError`` if the question count is not 1. - - Returns a ChainingResult object. - """ - if self.flags & dns.flags.QR == 0: - raise NotQueryResponse - if len(self.question) != 1: - raise dns.exception.FormError - question = self.question[0] - qname = question.name - min_ttl = dns.ttl.MAX_TTL - answer = None - count = 0 - cnames = [] - while count < MAX_CHAIN: - try: - answer = self.find_rrset( - self.answer, qname, question.rdclass, question.rdtype - ) - min_ttl = min(min_ttl, answer.ttl) - break - except KeyError: - if question.rdtype != dns.rdatatype.CNAME: - try: - crrset = self.find_rrset( - self.answer, qname, question.rdclass, dns.rdatatype.CNAME - ) - cnames.append(crrset) - min_ttl = min(min_ttl, crrset.ttl) - for rd in crrset: - qname = rd.target - break - count += 1 - continue - except KeyError: - # Exit the chaining loop - break - else: - # Exit the chaining loop - break - if count >= MAX_CHAIN: - raise ChainTooLong - if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: - raise AnswerForNXDOMAIN - if answer is None: - # Further minimize the TTL with NCACHE. - auname = qname - while True: - # Look for an SOA RR whose owner name is a superdomain - # of qname. 
- try: - srrset = self.find_rrset( - self.authority, auname, question.rdclass, dns.rdatatype.SOA - ) - min_ttl = min(min_ttl, srrset.ttl, srrset[0].minimum) - break - except KeyError: - try: - auname = auname.parent() - except dns.name.NoParent: - break - return ChainingResult(qname, answer, min_ttl, cnames) - - def canonical_name(self) -> dns.name.Name: - """Return the canonical name of the first name in the question - section. - - Raises ``dns.message.NotQueryResponse`` if the message is not - a response. - - Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. - - Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN - but an answer was found. - - Raises ``dns.exception.FormError`` if the question count is not 1. - """ - return self.resolve_chaining().canonical_name - - -def _maybe_import_update(): - # We avoid circular imports by doing this here. We do it in another - # function as doing it in _message_factory_from_opcode() makes "dns" - # a local symbol, and the first line fails :) - - # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import - import dns.update # noqa: F401 - - -def _message_factory_from_opcode(opcode): - if opcode == dns.opcode.QUERY: - return QueryMessage - elif opcode == dns.opcode.UPDATE: - _maybe_import_update() - return dns.update.UpdateMessage - else: - return Message - - -class _WireReader: - """Wire format reader. - - parser: the binary parser - message: The message object being built - initialize_message: Callback to set message parsing options - question_only: Are we only reading the question? - one_rr_per_rrset: Put each RR into its own RRset? - keyring: TSIG keyring - ignore_trailing: Ignore trailing junk at end of request? - multi: Is this message part of a multi-message sequence? - DNS dynamic updates. - continue_on_error: try to extract as much information as possible from - the message, accumulating MessageErrors in the *errors* attribute instead of - raising them. - """ - - def __init__( - self, - wire, - initialize_message, - question_only=False, - one_rr_per_rrset=False, - ignore_trailing=False, - keyring=None, - multi=False, - continue_on_error=False, - ): - self.parser = dns.wire.Parser(wire) - self.message = None - self.initialize_message = initialize_message - self.question_only = question_only - self.one_rr_per_rrset = one_rr_per_rrset - self.ignore_trailing = ignore_trailing - self.keyring = keyring - self.multi = multi - self.continue_on_error = continue_on_error - self.errors = [] - - def _get_question(self, section_number, qcount): - """Read the next *qcount* records from the wire data and add them to - the question section. - """ - assert self.message is not None - section = self.message.sections[section_number] - for _ in range(qcount): - qname = self.parser.get_name(self.message.origin) - (rdtype, rdclass) = self.parser.get_struct("!HH") - (rdclass, rdtype, _, _) = self.message._parse_rr_header( - section_number, qname, rdclass, rdtype - ) - self.message.find_rrset( - section, qname, rdclass, rdtype, create=True, force_unique=True - ) - - def _add_error(self, e): - self.errors.append(MessageError(e, self.parser.current)) - - def _get_section(self, section_number, count): - """Read the next I{count} records from the wire data and add them to - the specified section. 
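resolve_chaining() is easiest to see on a concrete response. The text format accepted by from_text() (defined later in this file) mirrors to_text() output, so a response can be sketched inline; the names and TTLs below are made up (not part of the diff, dnspython from PyPI assumed):

    import textwrap

    import dns.message

    text = textwrap.dedent(
        """\
        id 1234
        opcode QUERY
        rcode NOERROR
        flags QR RD RA
        ;QUESTION
        www.example. IN A
        ;ANSWER
        www.example. 300 IN CNAME host.example.
        host.example. 60 IN A 192.0.2.1
        """
    )
    response = dns.message.from_text(text)

    result = response.resolve_chaining()
    assert str(result.canonical_name) == "host.example."
    assert result.minimum_ttl == 60      # smallest TTL along the chain
    assert len(result.cnames) == 1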
- - section_number: the section of the message to which to add records - count: the number of records to read - """ - assert self.message is not None - section = self.message.sections[section_number] - force_unique = self.one_rr_per_rrset - for i in range(count): - rr_start = self.parser.current - absolute_name = self.parser.get_name() - if self.message.origin is not None: - name = absolute_name.relativize(self.message.origin) - else: - name = absolute_name - (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct("!HHIH") - if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): - ( - rdclass, - rdtype, - deleting, - empty, - ) = self.message._parse_special_rr_header( - section_number, count, i, name, rdclass, rdtype - ) - else: - (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - rdata_start = self.parser.current - try: - if empty: - if rdlen > 0: - raise dns.exception.FormError - rd = None - covers = dns.rdatatype.NONE - else: - with self.parser.restrict_to(rdlen): - rd = dns.rdata.from_wire_parser( - rdclass, rdtype, self.parser, self.message.origin - ) - covers = rd.covers() - if self.message.xfr and rdtype == dns.rdatatype.SOA: - force_unique = True - if rdtype == dns.rdatatype.OPT: - self.message.opt = dns.rrset.from_rdata(name, ttl, rd) - elif rdtype == dns.rdatatype.TSIG: - if self.keyring is None or self.keyring is True: - raise UnknownTSIGKey("got signed message without keyring") - elif isinstance(self.keyring, dict): - key = self.keyring.get(absolute_name) - if isinstance(key, bytes): - key = dns.tsig.Key(absolute_name, key, rd.algorithm) - elif callable(self.keyring): - key = self.keyring(self.message, absolute_name) - else: - key = self.keyring - if key is None: - raise UnknownTSIGKey(f"key '{name}' unknown") - if key: - self.message.keyring = key - self.message.tsig_ctx = dns.tsig.validate( - self.parser.wire, - key, - absolute_name, - rd, - int(time.time()), - self.message.request_mac, - rr_start, - self.message.tsig_ctx, - self.multi, - ) - self.message.tsig = dns.rrset.from_rdata(absolute_name, 0, rd) - else: - rrset = self.message.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - True, - force_unique, - ) - if rd is not None: - if ttl > 0x7FFFFFFF: - ttl = 0 - rrset.add(rd, ttl) - except Exception as e: - if self.continue_on_error: - self._add_error(e) - self.parser.seek(rdata_start + rdlen) - else: - raise - - def read(self): - """Read a wire format DNS message and build a dns.message.Message - object.""" - - if self.parser.remaining() < 12: - raise ShortHeader - (id, flags, qcount, ancount, aucount, adcount) = self.parser.get_struct( - "!HHHHHH" - ) - factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) - self.message = factory(id=id) - self.message.flags = dns.flags.Flag(flags) - self.message.wire = self.parser.wire - self.initialize_message(self.message) - self.one_rr_per_rrset = self.message._get_one_rr_per_rrset( - self.one_rr_per_rrset - ) - try: - self._get_question(MessageSection.QUESTION, qcount) - if self.question_only: - return self.message - self._get_section(MessageSection.ANSWER, ancount) - self._get_section(MessageSection.AUTHORITY, aucount) - self._get_section(MessageSection.ADDITIONAL, adcount) - if not self.ignore_trailing and self.parser.remaining() != 0: - raise TrailingJunk - if self.multi and self.message.tsig_ctx and not self.message.had_tsig: - self.message.tsig_ctx.update(self.parser.wire) - except Exception as e: - if self.continue_on_error: - 
self._add_error(e) - else: - raise - return self.message - - -def from_wire( - wire: bytes, - keyring: Optional[Any] = None, - request_mac: Optional[bytes] = b"", - xfr: bool = False, - origin: Optional[dns.name.Name] = None, - tsig_ctx: Optional[Union[dns.tsig.HMACTSig, dns.tsig.GSSTSig]] = None, - multi: bool = False, - question_only: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - continue_on_error: bool = False, -) -> Message: - """Convert a DNS wire format message into a message object. - - *keyring*, a ``dns.tsig.Key``, ``dict``, ``bool``, or ``None``, the key or keyring - to use if the message is signed. If ``None`` or ``True``, then trying to decode - a message with a TSIG will fail as it cannot be validated. If ``False``, then - TSIG validation is disabled. - - *request_mac*, a ``bytes`` or ``None``. If the message is a response to a - TSIG-signed request, *request_mac* should be set to the MAC of that request. - - *xfr*, a ``bool``, should be set to ``True`` if this message is part of a zone - transfer. - - *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone - transfer, *origin* should be the origin name of the zone. If not ``None``, names - will be relativized to the origin. - - *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the ongoing TSIG - context, used when validating zone transfers. - - *multi*, a ``bool``, should be set to ``True`` if this message is part of a multiple - message sequence. - - *question_only*, a ``bool``. If ``True``, read only up to the end of the question - section. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the - message. - - *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the TC bit is - set. - - *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even if - errors occur. Erroneous rdata will be ignored. Errors will be accumulated as a - list of MessageError objects in the message's ``errors`` attribute. This option is - recommended only for DNS analysis tools, or for use in a server as part of an error - handling path. The default is ``False``. - - Raises ``dns.message.ShortHeader`` if the message is less than 12 octets long. - - Raises ``dns.message.TrailingJunk`` if there were octets in the message past the end - of the proper DNS message, and *ignore_trailing* is ``False``. - - Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or - occurred more than once. - - Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of the - additional data section. - - Raises ``dns.message.Truncated`` if the TC flag is set and *raise_on_truncation* is - ``True``. - - Returns a ``dns.message.Message``. 
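to_wire() and from_wire() are inverses for well-formed, unsigned messages, which is the common round trip in servers and tests. A sketch (not part of the diff, dnspython from PyPI assumed):

    import dns.message

    query = dns.message.make_query("example.com.", "A")
    wire = query.to_wire()

    parsed = dns.message.from_wire(wire)
    assert parsed.id == query.id
    assert parsed == query    # equality covers header, question, and RR sections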
- """ - - # We permit None for request_mac solely for backwards compatibility - if request_mac is None: - request_mac = b"" - - def initialize_message(message): - message.request_mac = request_mac - message.xfr = xfr - message.origin = origin - message.tsig_ctx = tsig_ctx - - reader = _WireReader( - wire, - initialize_message, - question_only, - one_rr_per_rrset, - ignore_trailing, - keyring, - multi, - continue_on_error, - ) - try: - m = reader.read() - except dns.exception.FormError: - if ( - reader.message - and (reader.message.flags & dns.flags.TC) - and raise_on_truncation - ): - raise Truncated(message=reader.message) - else: - raise - # Reading a truncated message might not have any errors, so we - # have to do this check here too. - if m.flags & dns.flags.TC and raise_on_truncation: - raise Truncated(message=m) - if continue_on_error: - m.errors = reader.errors - - return m - - -class _TextReader: - """Text format reader. - - tok: the tokenizer. - message: The message object being built. - DNS dynamic updates. - last_name: The most recently read name when building a message object. - one_rr_per_rrset: Put each RR into its own RRset? - origin: The origin for relative names - relativize: relativize names? - relativize_to: the origin to relativize to. - """ - - def __init__( - self, - text, - idna_codec, - one_rr_per_rrset=False, - origin=None, - relativize=True, - relativize_to=None, - ): - self.message = None - self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) - self.last_name = None - self.one_rr_per_rrset = one_rr_per_rrset - self.origin = origin - self.relativize = relativize - self.relativize_to = relativize_to - self.id = None - self.edns = -1 - self.ednsflags = 0 - self.payload = DEFAULT_EDNS_PAYLOAD - self.rcode = None - self.opcode = dns.opcode.QUERY - self.flags = 0 - - def _header_line(self, _): - """Process one line from the text format header section.""" - - token = self.tok.get() - what = token.value - if what == "id": - self.id = self.tok.get_int() - elif what == "flags": - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.flags = self.flags | dns.flags.from_text(token.value) - elif what == "edns": - self.edns = self.tok.get_int() - self.ednsflags = self.ednsflags | (self.edns << 16) - elif what == "eflags": - if self.edns < 0: - self.edns = 0 - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.ednsflags = self.ednsflags | dns.flags.edns_from_text(token.value) - elif what == "payload": - self.payload = self.tok.get_int() - if self.edns < 0: - self.edns = 0 - elif what == "opcode": - text = self.tok.get_string() - self.opcode = dns.opcode.from_text(text) - self.flags = self.flags | dns.opcode.to_flags(self.opcode) - elif what == "rcode": - text = self.tok.get_string() - self.rcode = dns.rcode.from_text(text) - else: - raise UnknownHeaderField - self.tok.get_eol() - - def _question_line(self, section_number): - """Process one line from the text format question section.""" - - section = self.message.sections[section_number] - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name( - token, self.message.origin, self.relativize, self.relativize_to - ) - name = self.last_name - if name is None: - raise NoPreviousName - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = 
self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - (rdclass, rdtype, _, _) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - self.message.find_rrset( - section, name, rdclass, rdtype, create=True, force_unique=True - ) - self.tok.get_eol() - - def _rr_line(self, section_number): - """Process one line from the text format answer, authority, or - additional data sections. - """ - - section = self.message.sections[section_number] - # Name - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name( - token, self.message.origin, self.relativize, self.relativize_to - ) - name = self.last_name - if name is None: - raise NoPreviousName - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # TTL - try: - ttl = int(token.value, 0) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - ttl = 0 - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - token = self.tok.get() - if empty and not token.is_eol_or_eof(): - raise dns.exception.SyntaxError - if not empty and token.is_eol_or_eof(): - raise dns.exception.UnexpectedEnd - if not token.is_eol_or_eof(): - self.tok.unget(token) - rd = dns.rdata.from_text( - rdclass, - rdtype, - self.tok, - self.message.origin, - self.relativize, - self.relativize_to, - ) - covers = rd.covers() - else: - rd = None - covers = dns.rdatatype.NONE - rrset = self.message.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - True, - self.one_rr_per_rrset, - ) - if rd is not None: - rrset.add(rd, ttl) - - def _make_message(self): - factory = _message_factory_from_opcode(self.opcode) - message = factory(id=self.id) - message.flags = self.flags - if self.edns >= 0: - message.use_edns(self.edns, self.ednsflags, self.payload) - if self.rcode: - message.set_rcode(self.rcode) - if self.origin: - message.origin = self.origin - return message - - def read(self): - """Read a text format DNS message and build a dns.message.Message - object.""" - - line_method = self._header_line - section_number = None - while 1: - token = self.tok.get(True, True) - if token.is_eol_or_eof(): - break - if token.is_comment(): - u = token.value.upper() - if u == "HEADER": - line_method = self._header_line - - if self.message: - message = self.message - else: - # If we don't have a message, create one with the current - # opcode, so that we know which section names to parse. - message = self._make_message() - try: - section_number = message._section_enum.from_text(u) - # We found a section name. If we don't have a message, - # use the one we just created. 
- if not self.message: - self.message = message - self.one_rr_per_rrset = message._get_one_rr_per_rrset( - self.one_rr_per_rrset - ) - if section_number == MessageSection.QUESTION: - line_method = self._question_line - else: - line_method = self._rr_line - except Exception: - # It's just a comment. - pass - self.tok.get_eol() - continue - self.tok.unget(token) - line_method(section_number) - if not self.message: - self.message = self._make_message() - return self.message - - -def from_text( - text: str, - idna_codec: Optional[dns.name.IDNACodec] = None, - one_rr_per_rrset: bool = False, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> Message: - """Convert the text format message into a message object. - - The reader stops after reading the first blank line in the input to - facilitate reading multiple messages from a single file with - ``dns.message.from_file()``. - - *text*, a ``str``, the text format message. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put - into its own rrset. The default is ``False``. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. - - Returns a ``dns.message.Message object`` - """ - - # 'text' can also be a file, but we don't publish that fact - # since it's an implementation detail. The official file - # interface is from_file(). - - reader = _TextReader( - text, idna_codec, one_rr_per_rrset, origin, relativize, relativize_to - ) - return reader.read() - - -def from_file( - f: Any, - idna_codec: Optional[dns.name.IDNACodec] = None, - one_rr_per_rrset: bool = False, -) -> Message: - """Read the next text format message from the specified file. - - Message blocks are separated by a single blank line. - - *f*, a ``file`` or ``str``. If *f* is text, it is treated as the - pathname of a file to open. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put - into its own rrset. The default is ``False``. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. 
- - Returns a ``dns.message.Message object`` - """ - - if isinstance(f, str): - cm: contextlib.AbstractContextManager = open(f) - else: - cm = contextlib.nullcontext(f) - with cm as f: - return from_text(f, idna_codec, one_rr_per_rrset) - assert False # for mypy lgtm[py/unreachable-statement] - - -def make_query( - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - use_edns: Optional[Union[int, bool]] = None, - want_dnssec: bool = False, - ednsflags: Optional[int] = None, - payload: Optional[int] = None, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, - id: Optional[int] = None, - flags: int = dns.flags.RD, - pad: int = 0, -) -> QueryMessage: - """Make a query message. - - The query name, type, and class may all be specified either - as objects of the appropriate type, or as strings. - - The query will have a randomly chosen query id, and its DNS flags - will be set to dns.flags.RD. - - qname, a ``dns.name.Name`` or ``str``, the query name. - - *rdtype*, an ``int`` or ``str``, the desired rdata type. - - *rdclass*, an ``int`` or ``str``, the desired rdata class; the default - is class IN. - - *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the - default is ``None``. If ``None``, EDNS will be enabled only if other - parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are - set. - See the description of dns.message.Message.use_edns() for the possible - values for use_edns and their meanings. - - *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when - sending this message. If not specified, defaults to the value of - *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *id*, an ``int`` or ``None``, the desired query id. The default is - ``None``, which generates a random query id. - - *flags*, an ``int``, the desired query flags. The default is - ``dns.flags.RD``. - - *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add - padding bytes to make the message size a multiple of *pad*. Note that if - padding is non-zero, an EDNS PADDING option will always be added to the - message. - - Returns a ``dns.message.QueryMessage`` - """ - - if isinstance(qname, str): - qname = dns.name.from_text(qname, idna_codec=idna_codec) - rdtype = dns.rdatatype.RdataType.make(rdtype) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - m = QueryMessage(id=id) - m.flags = dns.flags.Flag(flags) - m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True) - # only pass keywords on to use_edns if they have been set to a - # non-None value. Setting a field will turn EDNS on if it hasn't - # been configured. 
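Because from_file() reads one blank-line-separated message per call, several saved messages can be pulled from a single stream; the file name below is hypothetical (a sketch, not part of the diff, dnspython from PyPI assumed):

    import dns.message

    # Hypothetical file containing two messages separated by a blank line,
    # e.g. as written out earlier via to_text().
    with open("saved_messages.txt") as f:
        first = dns.message.from_file(f)
        second = dns.message.from_file(f)

    # Passing a path instead of a file object also works, but then only the
    # first message can be read, since the file is closed on return.
    only = dns.message.from_file("saved_messages.txt")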
- kwargs: Dict[str, Any] = {} - if ednsflags is not None: - kwargs["ednsflags"] = ednsflags - if payload is not None: - kwargs["payload"] = payload - if request_payload is not None: - kwargs["request_payload"] = request_payload - if options is not None: - kwargs["options"] = options - if kwargs and use_edns is None: - use_edns = 0 - kwargs["edns"] = use_edns - kwargs["pad"] = pad - m.use_edns(**kwargs) - m.want_dnssec(want_dnssec) - return m - - -class CopyMode(enum.Enum): - """ - How should sections be copied when making an update response? - """ - - NOTHING = 0 - QUESTION = 1 - EVERYTHING = 2 - - -def make_response( - query: Message, - recursion_available: bool = False, - our_payload: int = 8192, - fudge: int = 300, - tsig_error: int = 0, - pad: Optional[int] = None, - copy_mode: Optional[CopyMode] = None, -) -> Message: - """Make a message which is a response for the specified query. - The message returned is really a response skeleton; it has all of the infrastructure - required of a response, but none of the content. - - Response section(s) which are copied are shallow copies of the matching section(s) - in the query, so the query's RRsets should not be changed. - - *query*, a ``dns.message.Message``, the query to respond to. - - *recursion_available*, a ``bool``, should RA be set in the response? - - *our_payload*, an ``int``, the payload size to advertise in EDNS responses. - - *fudge*, an ``int``, the TSIG time fudge. - - *tsig_error*, an ``int``, the TSIG error. - - *pad*, a non-negative ``int`` or ``None``. If 0, the default, do not pad; otherwise - if not ``None`` add padding bytes to make the message size a multiple of *pad*. Note - that if padding is non-zero, an EDNS PADDING option will always be added to the - message. If ``None``, add padding following RFC 8467, namely if the request is - padded, pad the response to 468 otherwise do not pad. - - *copy_mode*, a ``dns.message.CopyMode`` or ``None``, determines how sections are - copied. The default, ``None`` copies sections according to the default for the - message's opcode, which is currently ``dns.message.CopyMode.QUESTION`` for all - opcodes. ``dns.message.CopyMode.QUESTION`` copies only the question section. - ``dns.message.CopyMode.EVERYTHING`` copies all sections other than OPT or TSIG - records, which are created appropriately if needed. ``dns.message.CopyMode.NOTHING`` - copies no sections; note that this mode is for server testing purposes and is - otherwise not recommended for use. In particular, ``dns.message.is_response()`` - will be ``False`` if you create a response this way and the rcode is not - ``FORMERR``, ``SERVFAIL``, ``NOTIMP``, or ``REFUSED``. - - Returns a ``dns.message.Message`` object whose specific class is appropriate for the - query. For example, if query is a ``dns.update.UpdateMessage``, the response will - be one too. 
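# [Editor's note] A hedged sketch of make_response() as described above: the
# result is a skeleton that shares the query's id and question but carries no
# answer content. Assumes dnspython's public API.
import dns.message
import dns.rcode

query = dns.message.make_query("example.com", "A")
response = dns.message.make_response(query)
assert response.id == query.id
assert response.question == query.question
response.set_rcode(dns.rcode.NXDOMAIN)  # filling in content is the caller's job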
- """ - - if query.flags & dns.flags.QR: - raise dns.exception.FormError("specified query message is not a query") - opcode = query.opcode() - factory = _message_factory_from_opcode(opcode) - response = factory(id=query.id) - response.flags = dns.flags.QR | (query.flags & dns.flags.RD) - if recursion_available: - response.flags |= dns.flags.RA - response.set_opcode(opcode) - if copy_mode is None: - copy_mode = CopyMode.QUESTION - if copy_mode != CopyMode.NOTHING: - response.question = list(query.question) - if copy_mode == CopyMode.EVERYTHING: - response.answer = list(query.answer) - response.authority = list(query.authority) - response.additional = list(query.additional) - if query.edns >= 0: - if pad is None: - # Set response padding per RFC 8467 - pad = 0 - for option in query.options: - if option.otype == dns.edns.OptionType.PADDING: - pad = 468 - response.use_edns(0, 0, our_payload, query.payload, pad=pad) - if query.had_tsig: - response.use_tsig( - query.keyring, - query.keyname, - fudge, - None, - tsig_error, - b"", - query.keyalgorithm, - ) - response.request_mac = query.mac - return response - - -### BEGIN generated MessageSection constants - -QUESTION = MessageSection.QUESTION -ANSWER = MessageSection.ANSWER -AUTHORITY = MessageSection.AUTHORITY -ADDITIONAL = MessageSection.ADDITIONAL - -### END generated MessageSection constants diff --git a/venv/lib/python3.12/site-packages/dns/name.py b/venv/lib/python3.12/site-packages/dns/name.py deleted file mode 100644 index f79f0d0f..00000000 --- a/venv/lib/python3.12/site-packages/dns/name.py +++ /dev/null @@ -1,1284 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Names. -""" - -import copy -import encodings.idna # type: ignore -import functools -import struct -from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Union - -import dns._features -import dns.enum -import dns.exception -import dns.immutable -import dns.wire - -if dns._features.have("idna"): - import idna # type: ignore - - have_idna_2008 = True -else: # pragma: no cover - have_idna_2008 = False - -CompressType = Dict["Name", int] - - -class NameRelation(dns.enum.IntEnum): - """Name relation result from fullcompare().""" - - # This is an IntEnum for backwards compatibility in case anyone - # has hardwired the constants. - - #: The compared names have no relationship to each other. - NONE = 0 - #: the first name is a superdomain of the second. - SUPERDOMAIN = 1 - #: The first name is a subdomain of the second. - SUBDOMAIN = 2 - #: The compared names are equal. - EQUAL = 3 - #: The compared names have a common ancestor. 
- COMMONANCESTOR = 4 - - @classmethod - def _maximum(cls): - return cls.COMMONANCESTOR # pragma: no cover - - @classmethod - def _short_name(cls): - return cls.__name__ # pragma: no cover - - -# Backwards compatibility -NAMERELN_NONE = NameRelation.NONE -NAMERELN_SUPERDOMAIN = NameRelation.SUPERDOMAIN -NAMERELN_SUBDOMAIN = NameRelation.SUBDOMAIN -NAMERELN_EQUAL = NameRelation.EQUAL -NAMERELN_COMMONANCESTOR = NameRelation.COMMONANCESTOR - - -class EmptyLabel(dns.exception.SyntaxError): - """A DNS label is empty.""" - - -class BadEscape(dns.exception.SyntaxError): - """An escaped code in a text format of DNS name is invalid.""" - - -class BadPointer(dns.exception.FormError): - """A DNS compression pointer points forward instead of backward.""" - - -class BadLabelType(dns.exception.FormError): - """The label type in DNS name wire format is unknown.""" - - -class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): - """An attempt was made to convert a non-absolute name to - wire when there was also a non-absolute (or missing) origin.""" - - -class NameTooLong(dns.exception.FormError): - """A DNS name is > 255 octets long.""" - - -class LabelTooLong(dns.exception.SyntaxError): - """A DNS label is > 63 octets long.""" - - -class AbsoluteConcatenation(dns.exception.DNSException): - """An attempt was made to append anything other than the - empty name to an absolute DNS name.""" - - -class NoParent(dns.exception.DNSException): - """An attempt was made to get the parent of the root name - or the empty name.""" - - -class NoIDNA2008(dns.exception.DNSException): - """IDNA 2008 processing was requested but the idna module is not - available.""" - - -class IDNAException(dns.exception.DNSException): - """IDNA processing raised an exception.""" - - supp_kwargs = {"idna_exception"} - fmt = "IDNA processing exception: {idna_exception}" - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - -class NeedSubdomainOfOrigin(dns.exception.DNSException): - """An absolute name was provided that is not a subdomain of the specified origin.""" - - -_escaped = b'"().;\\@$' -_escaped_text = '"().;\\@$' - - -def _escapify(label: Union[bytes, str]) -> str: - """Escape the characters in label which need it. - @returns: the escaped string - @rtype: string""" - if isinstance(label, bytes): - # Ordinary DNS label mode. Escape special characters and values - # < 0x20 or > 0x7f. - text = "" - for c in label: - if c in _escaped: - text += "\\" + chr(c) - elif c > 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - # Unicode label mode. Escape only special characters and values < 0x20 - text = "" - for uc in label: - if uc in _escaped_text: - text += "\\" + uc - elif uc <= "\x20": - text += "\\%03d" % ord(uc) - else: - text += uc - return text - - -class IDNACodec: - """Abstract base class for IDNA encoder/decoders.""" - - def __init__(self): - pass - - def is_idna(self, label: bytes) -> bool: - return label.lower().startswith(b"xn--") - - def encode(self, label: str) -> bytes: - raise NotImplementedError # pragma: no cover - - def decode(self, label: bytes) -> str: - # We do not apply any IDNA policy on decode. 
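# [Editor's note] A small illustration of the policy-free decode described in
# the comment above: an ACE ("xn--") label is decoded from punycode, while all
# other labels are merely escaped. Assumes dnspython's public dns.name API.
import dns.name

name = dns.name.from_text("xn--bcher-kva.example.")
print(name.to_text())     # xn--bcher-kva.example.
print(name.to_unicode())  # bücher.example.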
- if self.is_idna(label): - try: - slabel = label[4:].decode("punycode") - return _escapify(slabel) - except Exception as e: - raise IDNAException(idna_exception=e) - else: - return _escapify(label) - - -class IDNA2003Codec(IDNACodec): - """IDNA 2003 encoder/decoder.""" - - def __init__(self, strict_decode: bool = False): - """Initialize the IDNA 2003 encoder/decoder. - - *strict_decode* is a ``bool``. If `True`, then IDNA2003 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2008. The default is `False`. - """ - - super().__init__() - self.strict_decode = strict_decode - - def encode(self, label: str) -> bytes: - """Encode *label*.""" - - if label == "": - return b"" - try: - return encodings.idna.ToASCII(label) - except UnicodeError: - raise LabelTooLong - - def decode(self, label: bytes) -> str: - """Decode *label*.""" - if not self.strict_decode: - return super().decode(label) - if label == b"": - return "" - try: - return _escapify(encodings.idna.ToUnicode(label)) - except Exception as e: - raise IDNAException(idna_exception=e) - - -class IDNA2008Codec(IDNACodec): - """IDNA 2008 encoder/decoder.""" - - def __init__( - self, - uts_46: bool = False, - transitional: bool = False, - allow_pure_ascii: bool = False, - strict_decode: bool = False, - ): - """Initialize the IDNA 2008 encoder/decoder. - - *uts_46* is a ``bool``. If True, apply Unicode IDNA - compatibility processing as described in Unicode Technical - Standard #46 (https://unicode.org/reports/tr46/). - If False, do not apply the mapping. The default is False. - - *transitional* is a ``bool``: If True, use the - "transitional" mode described in Unicode Technical Standard - #46. The default is False. - - *allow_pure_ascii* is a ``bool``. If True, then a label which - consists of only ASCII characters is allowed. This is less - strict than regular IDNA 2008, but is also necessary for mixed - names, e.g. a name with starting with "_sip._tcp." and ending - in an IDN suffix which would otherwise be disallowed. The - default is False. - - *strict_decode* is a ``bool``: If True, then IDNA2008 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2003. The default is False. 
- """ - super().__init__() - self.uts_46 = uts_46 - self.transitional = transitional - self.allow_pure_ascii = allow_pure_ascii - self.strict_decode = strict_decode - - def encode(self, label: str) -> bytes: - if label == "": - return b"" - if self.allow_pure_ascii and is_all_ascii(label): - encoded = label.encode("ascii") - if len(encoded) > 63: - raise LabelTooLong - return encoded - if not have_idna_2008: - raise NoIDNA2008 - try: - if self.uts_46: - # pylint: disable=possibly-used-before-assignment - label = idna.uts46_remap(label, False, self.transitional) - return idna.alabel(label) - except idna.IDNAError as e: - if e.args[0] == "Label too long": - raise LabelTooLong - else: - raise IDNAException(idna_exception=e) - - def decode(self, label: bytes) -> str: - if not self.strict_decode: - return super().decode(label) - if label == b"": - return "" - if not have_idna_2008: - raise NoIDNA2008 - try: - ulabel = idna.ulabel(label) - if self.uts_46: - ulabel = idna.uts46_remap(ulabel, False, self.transitional) - return _escapify(ulabel) - except (idna.IDNAError, UnicodeError) as e: - raise IDNAException(idna_exception=e) - - -IDNA_2003_Practical = IDNA2003Codec(False) -IDNA_2003_Strict = IDNA2003Codec(True) -IDNA_2003 = IDNA_2003_Practical -IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) -IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) -IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) -IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) -IDNA_2008 = IDNA_2008_Practical - - -def _validate_labels(labels: Tuple[bytes, ...]) -> None: - """Check for empty labels in the middle of a label sequence, - labels that are too long, and for too many labels. - - Raises ``dns.name.NameTooLong`` if the name as a whole is too long. - - Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root - label) and appears in a position other than the end of the label - sequence - - """ - - l = len(labels) - total = 0 - i = -1 - j = 0 - for label in labels: - ll = len(label) - total += ll + 1 - if ll > 63: - raise LabelTooLong - if i < 0 and label == b"": - i = j - j += 1 - if total > 255: - raise NameTooLong - if i >= 0 and i != l - 1: - raise EmptyLabel - - -def _maybe_convert_to_binary(label: Union[bytes, str]) -> bytes: - """If label is ``str``, convert it to ``bytes``. If it is already - ``bytes`` just return it. - - """ - - if isinstance(label, bytes): - return label - if isinstance(label, str): - return label.encode() - raise ValueError # pragma: no cover - - -@dns.immutable.immutable -class Name: - """A DNS name. - - The dns.name.Name class represents a DNS name as a tuple of - labels. Each label is a ``bytes`` in DNS wire format. Instances - of the class are immutable. - """ - - __slots__ = ["labels"] - - def __init__(self, labels: Iterable[Union[bytes, str]]): - """*labels* is any iterable whose values are ``str`` or ``bytes``.""" - - blabels = [_maybe_convert_to_binary(x) for x in labels] - self.labels = tuple(blabels) - _validate_labels(self.labels) - - def __copy__(self): - return Name(self.labels) - - def __deepcopy__(self, memo): - return Name(copy.deepcopy(self.labels, memo)) - - def __getstate__(self): - # Names can be pickled - return {"labels": self.labels} - - def __setstate__(self, state): - super().__setattr__("labels", state["labels"]) - _validate_labels(self.labels) - - def is_absolute(self) -> bool: - """Is the most significant label of this name the root label? - - Returns a ``bool``. 
- """ - - return len(self.labels) > 0 and self.labels[-1] == b"" - - def is_wild(self) -> bool: - """Is this name wild? (I.e. Is the least significant label '*'?) - - Returns a ``bool``. - """ - - return len(self.labels) > 0 and self.labels[0] == b"*" - - def __hash__(self) -> int: - """Return a case-insensitive hash of the name. - - Returns an ``int``. - """ - - h = 0 - for label in self.labels: - for c in label.lower(): - h += (h << 3) + c - return h - - def fullcompare(self, other: "Name") -> Tuple[NameRelation, int, int]: - """Compare two names, returning a 3-tuple - ``(relation, order, nlabels)``. - - *relation* describes the relation ship between the names, - and is one of: ``dns.name.NameRelation.NONE``, - ``dns.name.NameRelation.SUPERDOMAIN``, ``dns.name.NameRelation.SUBDOMAIN``, - ``dns.name.NameRelation.EQUAL``, or ``dns.name.NameRelation.COMMONANCESTOR``. - - *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == - 0 if *self* == *other*. A relative name is always less than an - absolute name. If both names have the same relativity, then - the DNSSEC order relation is used to order them. - - *nlabels* is the number of significant labels that the two names - have in common. - - Here are some examples. Names ending in "." are absolute names, - those not ending in "." are relative names. - - ============= ============= =========== ===== ======= - self other relation order nlabels - ============= ============= =========== ===== ======= - www.example. www.example. equal 0 3 - www.example. example. subdomain > 0 2 - example. www.example. superdomain < 0 2 - example1.com. example2.com. common anc. < 0 2 - example1 example2. none < 0 0 - example1. example2 none > 0 0 - ============= ============= =========== ===== ======= - """ - - sabs = self.is_absolute() - oabs = other.is_absolute() - if sabs != oabs: - if sabs: - return (NameRelation.NONE, 1, 0) - else: - return (NameRelation.NONE, -1, 0) - l1 = len(self.labels) - l2 = len(other.labels) - ldiff = l1 - l2 - if ldiff < 0: - l = l1 - else: - l = l2 - - order = 0 - nlabels = 0 - namereln = NameRelation.NONE - while l > 0: - l -= 1 - l1 -= 1 - l2 -= 1 - label1 = self.labels[l1].lower() - label2 = other.labels[l2].lower() - if label1 < label2: - order = -1 - if nlabels > 0: - namereln = NameRelation.COMMONANCESTOR - return (namereln, order, nlabels) - elif label1 > label2: - order = 1 - if nlabels > 0: - namereln = NameRelation.COMMONANCESTOR - return (namereln, order, nlabels) - nlabels += 1 - order = ldiff - if ldiff < 0: - namereln = NameRelation.SUPERDOMAIN - elif ldiff > 0: - namereln = NameRelation.SUBDOMAIN - else: - namereln = NameRelation.EQUAL - return (namereln, order, nlabels) - - def is_subdomain(self, other: "Name") -> bool: - """Is self a subdomain of other? - - Note that the notion of subdomain includes equality, e.g. - "dnspython.org" is a subdomain of itself. - - Returns a ``bool``. - """ - - (nr, _, _) = self.fullcompare(other) - if nr == NameRelation.SUBDOMAIN or nr == NameRelation.EQUAL: - return True - return False - - def is_superdomain(self, other: "Name") -> bool: - """Is self a superdomain of other? - - Note that the notion of superdomain includes equality, e.g. - "dnspython.org" is a superdomain of itself. - - Returns a ``bool``. - """ - - (nr, _, _) = self.fullcompare(other) - if nr == NameRelation.SUPERDOMAIN or nr == NameRelation.EQUAL: - return True - return False - - def canonicalize(self) -> "Name": - """Return a name which is equal to the current name, but is in - DNSSEC canonical form. 
- """ - - return Name([x.lower() for x in self.labels]) - - def __eq__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] == 0 - else: - return False - - def __ne__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] != 0 - else: - return True - - def __lt__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] < 0 - else: - return NotImplemented - - def __le__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] <= 0 - else: - return NotImplemented - - def __ge__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] >= 0 - else: - return NotImplemented - - def __gt__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] > 0 - else: - return NotImplemented - - def __repr__(self): - return "" - - def __str__(self): - return self.to_text(False) - - def to_text(self, omit_final_dot: bool = False) -> str: - """Convert name to DNS text format. - - *omit_final_dot* is a ``bool``. If True, don't emit the final - dot (denoting the root label) for absolute names. The default - is False. - - Returns a ``str``. - """ - - if len(self.labels) == 0: - return "@" - if len(self.labels) == 1 and self.labels[0] == b"": - return "." - if omit_final_dot and self.is_absolute(): - l = self.labels[:-1] - else: - l = self.labels - s = ".".join(map(_escapify, l)) - return s - - def to_unicode( - self, omit_final_dot: bool = False, idna_codec: Optional[IDNACodec] = None - ) -> str: - """Convert name to Unicode text format. - - IDN ACE labels are converted to Unicode. - - *omit_final_dot* is a ``bool``. If True, don't emit the final - dot (denoting the root label) for absolute names. The default - is False. - *idna_codec* specifies the IDNA encoder/decoder. If None, the - dns.name.IDNA_2003_Practical encoder/decoder is used. - The IDNA_2003_Practical decoder does - not impose any policy, it just decodes punycode, so if you - don't want checking for compliance, you can use this decoder - for IDNA2008 as well. - - Returns a ``str``. - """ - - if len(self.labels) == 0: - return "@" - if len(self.labels) == 1 and self.labels[0] == b"": - return "." - if omit_final_dot and self.is_absolute(): - l = self.labels[:-1] - else: - l = self.labels - if idna_codec is None: - idna_codec = IDNA_2003_Practical - return ".".join([idna_codec.decode(x) for x in l]) - - def to_digestable(self, origin: Optional["Name"] = None) -> bytes: - """Convert name to a format suitable for digesting in hashes. - - The name is canonicalized and converted to uncompressed wire - format. All names in wire format are absolute. If the name - is a relative name, then an origin must be supplied. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then origin will be appended - to the name. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``bytes``. - """ - - digest = self.to_wire(origin=origin, canonicalize=True) - assert digest is not None - return digest - - def to_wire( - self, - file: Optional[Any] = None, - compress: Optional[CompressType] = None, - origin: Optional["Name"] = None, - canonicalize: bool = False, - ) -> Optional[bytes]: - """Convert name to wire format, possibly compressing it. - - *file* is the file where the name is emitted (typically an - io.BytesIO file). If ``None`` (the default), a ``bytes`` - containing the wire name will be returned. 
- - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. Note that - the compression code assumes that compression offset 0 is the - start of *file*, and thus compression will not be correct - if this is not the case. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - *canonicalize*, a ``bool``, indicates whether the name should - be canonicalized; that is, converted to a format suitable for - digesting in hashes. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``bytes`` or ``None``. - """ - - if file is None: - out = bytearray() - for label in self.labels: - out.append(len(label)) - if canonicalize: - out += label.lower() - else: - out += label - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - for label in origin.labels: - out.append(len(label)) - if canonicalize: - out += label.lower() - else: - out += label - return bytes(out) - - labels: Iterable[bytes] - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - labels = list(self.labels) - labels.extend(list(origin.labels)) - else: - labels = self.labels - i = 0 - for label in labels: - n = Name(labels[i:]) - i += 1 - if compress is not None: - pos = compress.get(n) - else: - pos = None - if pos is not None: - value = 0xC000 + pos - s = struct.pack("!H", value) - file.write(s) - break - else: - if compress is not None and len(n) > 1: - pos = file.tell() - if pos <= 0x3FFF: - compress[n] = pos - l = len(label) - file.write(struct.pack("!B", l)) - if l > 0: - if canonicalize: - file.write(label.lower()) - else: - file.write(label) - return None - - def __len__(self) -> int: - """The length of the name (in labels). - - Returns an ``int``. - """ - - return len(self.labels) - - def __getitem__(self, index): - return self.labels[index] - - def __add__(self, other): - return self.concatenate(other) - - def __sub__(self, other): - return self.relativize(other) - - def split(self, depth: int) -> Tuple["Name", "Name"]: - """Split a name into a prefix and suffix names at the specified depth. - - *depth* is an ``int`` specifying the number of labels in the suffix - - Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the - name. - - Returns the tuple ``(prefix, suffix)``. - """ - - l = len(self.labels) - if depth == 0: - return (self, dns.name.empty) - elif depth == l: - return (dns.name.empty, self) - elif depth < 0 or depth > l: - raise ValueError("depth must be >= 0 and <= the length of the name") - return (Name(self[:-depth]), Name(self[-depth:])) - - def concatenate(self, other: "Name") -> "Name": - """Return a new name which is the concatenation of self and other. - - Raises ``dns.name.AbsoluteConcatenation`` if the name is - absolute and *other* is not the empty name. - - Returns a ``dns.name.Name``. - """ - - if self.is_absolute() and len(other) > 0: - raise AbsoluteConcatenation - labels = list(self.labels) - labels.extend(list(other.labels)) - return Name(labels) - - def relativize(self, origin: "Name") -> "Name": - """If the name is a subdomain of *origin*, return a new name which is - the name relative to origin. Otherwise return the name. - - For example, relativizing ``www.dnspython.org.`` to origin - ``dnspython.org.`` returns the name ``www``. 
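# [Editor's note] An illustrative sketch of to_wire() with a shared
# compression table, assuming dnspython's API; the second name is emitted as
# a 2-byte pointer into the first, so the total is 20 bytes rather than the
# 27 bytes the two names would take uncompressed.
import io

import dns.name

out = io.BytesIO()
compress: dict = {}
dns.name.from_text("mail.example.").to_wire(out, compress)
dns.name.from_text("www.example.").to_wire(out, compress)
print(len(out.getvalue()))  # 20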
Relativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. - """ - - if origin is not None and self.is_subdomain(origin): - return Name(self[: -len(origin)]) - else: - return self - - def derelativize(self, origin: "Name") -> "Name": - """If the name is a relative name, return a new name which is the - concatenation of the name and origin. Otherwise return the name. - - For example, derelativizing ``www`` to origin ``dnspython.org.`` - returns the name ``www.dnspython.org.``. Derelativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. - """ - - if not self.is_absolute(): - return self.concatenate(origin) - else: - return self - - def choose_relativity( - self, origin: Optional["Name"] = None, relativize: bool = True - ) -> "Name": - """Return a name with the relativity desired by the caller. - - If *origin* is ``None``, then the name is returned. - Otherwise, if *relativize* is ``True`` the name is - relativized, and if *relativize* is ``False`` the name is - derelativized. - - Returns a ``dns.name.Name``. - """ - - if origin: - if relativize: - return self.relativize(origin) - else: - return self.derelativize(origin) - else: - return self - - def parent(self) -> "Name": - """Return the parent of the name. - - For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. - - Raises ``dns.name.NoParent`` if the name is either the root name or the - empty name, and thus has no parent. - - Returns a ``dns.name.Name``. - """ - - if self == root or self == empty: - raise NoParent - return Name(self.labels[1:]) - - def predecessor(self, origin: "Name", prefix_ok: bool = True) -> "Name": - """Return the maximal predecessor of *name* in the DNSSEC ordering in the zone - whose origin is *origin*, or return the longest name under *origin* if the - name is origin (i.e. wrap around to the longest name, which may still be - *origin* due to length considerations. - - The relativity of the name is preserved, so if this name is relative - then the method will return a relative name, and likewise if this name - is absolute then the predecessor will be absolute. - - *prefix_ok* indicates if prefixing labels is allowed, and - defaults to ``True``. Normally it is good to allow this, but if computing - a maximal predecessor at a zone cut point then ``False`` must be specified. - """ - return _handle_relativity_and_call( - _absolute_predecessor, self, origin, prefix_ok - ) - - def successor(self, origin: "Name", prefix_ok: bool = True) -> "Name": - """Return the minimal successor of *name* in the DNSSEC ordering in the zone - whose origin is *origin*, or return *origin* if the successor cannot be - computed due to name length limitations. - - Note that *origin* is returned in the "too long" cases because wrapping - around to the origin is how NSEC records express "end of the zone". - - The relativity of the name is preserved, so if this name is relative - then the method will return a relative name, and likewise if this name - is absolute then the successor will be absolute. - - *prefix_ok* indicates if prefixing a new minimal label is allowed, and - defaults to ``True``. Normally it is good to allow this, but if computing - a minimal successor at a zone cut point then ``False`` must be specified. - """ - return _handle_relativity_and_call(_absolute_successor, self, origin, prefix_ok) - - -#: The root name, '.' -root = Name([b""]) - -#: The empty name. 
-empty = Name([]) - - -def from_unicode( - text: str, origin: Optional[Name] = root, idna_codec: Optional[IDNACodec] = None -) -> Name: - """Convert unicode text into a Name object. - - Labels are encoded in IDN ACE form according to rules specified by - the IDNA codec. - - *text*, a ``str``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. - """ - - if not isinstance(text, str): - raise ValueError("input to from_unicode() must be a unicode string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = "" - escaping = False - edigits = 0 - total = 0 - if idna_codec is None: - idna_codec = IDNA_2003 - if text == "@": - text = "" - if text: - if text in [".", "\u3002", "\uff0e", "\uff61"]: - return Name([b""]) # no Unicode "u" on this constant! - for c in text: - if escaping: - if edigits == 0: - if c.isdigit(): - total = int(c) - edigits += 1 - else: - label += c - escaping = False - else: - if not c.isdigit(): - raise BadEscape - total *= 10 - total += int(c) - edigits += 1 - if edigits == 3: - escaping = False - label += chr(total) - elif c in [".", "\u3002", "\uff0e", "\uff61"]: - if len(label) == 0: - raise EmptyLabel - labels.append(idna_codec.encode(label)) - label = "" - elif c == "\\": - escaping = True - edigits = 0 - total = 0 - else: - label += c - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(idna_codec.encode(label)) - else: - labels.append(b"") - - if (len(labels) == 0 or labels[-1] != b"") and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -def is_all_ascii(text: str) -> bool: - for c in text: - if ord(c) > 0x7F: - return False - return True - - -def from_text( - text: Union[bytes, str], - origin: Optional[Name] = root, - idna_codec: Optional[IDNACodec] = None, -) -> Name: - """Convert text into a Name object. - - *text*, a ``bytes`` or ``str``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. - """ - - if isinstance(text, str): - if not is_all_ascii(text): - # Some codepoint in the input text is > 127, so IDNA applies. - return from_unicode(text, origin, idna_codec) - # The input is all ASCII, so treat this like an ordinary non-IDNA - # domain name. Note that "all ASCII" is about the input text, - # not the codepoints in the domain name. E.g. if text has value - # - # r'\150\151\152\153\154\155\156\157\158\159' - # - # then it's still "all ASCII" even though the domain name has - # codepoints > 127. 
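# [Editor's note] A minimal from_unicode() sketch matching the docstring
# above, assuming dnspython is installed; each label is IDNA-encoded to its
# ACE form, and the default origin (the root) makes the result absolute.
import dns.name

name = dns.name.from_unicode("bücher.example")
print(name)  # xn--bcher-kva.example.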
- text = text.encode("ascii") - if not isinstance(text, bytes): - raise ValueError("input to from_text() must be a string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = b"" - escaping = False - edigits = 0 - total = 0 - if text == b"@": - text = b"" - if text: - if text == b".": - return Name([b""]) - for c in text: - byte_ = struct.pack("!B", c) - if escaping: - if edigits == 0: - if byte_.isdigit(): - total = int(byte_) - edigits += 1 - else: - label += byte_ - escaping = False - else: - if not byte_.isdigit(): - raise BadEscape - total *= 10 - total += int(byte_) - edigits += 1 - if edigits == 3: - escaping = False - label += struct.pack("!B", total) - elif byte_ == b".": - if len(label) == 0: - raise EmptyLabel - labels.append(label) - label = b"" - elif byte_ == b"\\": - escaping = True - edigits = 0 - total = 0 - else: - label += byte_ - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(label) - else: - labels.append(b"") - if (len(labels) == 0 or labels[-1] != b"") and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -# we need 'dns.wire.Parser' quoted as dns.name and dns.wire depend on each other. - - -def from_wire_parser(parser: "dns.wire.Parser") -> Name: - """Convert possibly compressed wire format into a Name. - - *parser* is a dns.wire.Parser. - - Raises ``dns.name.BadPointer`` if a compression pointer did not - point backwards in the message. - - Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. - - Returns a ``dns.name.Name`` - """ - - labels = [] - biggest_pointer = parser.current - with parser.restore_furthest(): - count = parser.get_uint8() - while count != 0: - if count < 64: - labels.append(parser.get_bytes(count)) - elif count >= 192: - current = (count & 0x3F) * 256 + parser.get_uint8() - if current >= biggest_pointer: - raise BadPointer - biggest_pointer = current - parser.seek(current) - else: - raise BadLabelType - count = parser.get_uint8() - labels.append(b"") - return Name(labels) - - -def from_wire(message: bytes, current: int) -> Tuple[Name, int]: - """Convert possibly compressed wire format into a Name. - - *message* is a ``bytes`` containing an entire DNS message in DNS - wire form. - - *current*, an ``int``, is the offset of the beginning of the name - from the start of the message - - Raises ``dns.name.BadPointer`` if a compression pointer did not - point backwards in the message. - - Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. - - Returns a ``(dns.name.Name, int)`` tuple consisting of the name - that was read and the number of bytes of the wire format message - which were consumed reading it. 
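# [Editor's note] A round-trip sketch through wire format using from_wire()
# as documented above; assumes dnspython's public API.
import dns.name

name = dns.name.from_text("www.example.com.")
wire = name.to_wire()
parsed, consumed = dns.name.from_wire(wire, 0)
assert parsed == name and consumed == len(wire)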
- """ - - if not isinstance(message, bytes): - raise ValueError("input to from_wire() must be a byte string") - parser = dns.wire.Parser(message, current) - name = from_wire_parser(parser) - return (name, parser.current - current) - - -# RFC 4471 Support - -_MINIMAL_OCTET = b"\x00" -_MINIMAL_OCTET_VALUE = ord(_MINIMAL_OCTET) -_SUCCESSOR_PREFIX = Name([_MINIMAL_OCTET]) -_MAXIMAL_OCTET = b"\xff" -_MAXIMAL_OCTET_VALUE = ord(_MAXIMAL_OCTET) -_AT_SIGN_VALUE = ord("@") -_LEFT_SQUARE_BRACKET_VALUE = ord("[") - - -def _wire_length(labels): - return functools.reduce(lambda v, x: v + len(x) + 1, labels, 0) - - -def _pad_to_max_name(name): - needed = 255 - _wire_length(name.labels) - new_labels = [] - while needed > 64: - new_labels.append(_MAXIMAL_OCTET * 63) - needed -= 64 - if needed >= 2: - new_labels.append(_MAXIMAL_OCTET * (needed - 1)) - # Note we're already maximal in the needed == 1 case as while we'd like - # to add one more byte as a new label, we can't, as adding a new non-empty - # label requires at least 2 bytes. - new_labels = list(reversed(new_labels)) - new_labels.extend(name.labels) - return Name(new_labels) - - -def _pad_to_max_label(label, suffix_labels): - length = len(label) - # We have to subtract one here to account for the length byte of label. - remaining = 255 - _wire_length(suffix_labels) - length - 1 - if remaining <= 0: - # Shouldn't happen! - return label - needed = min(63 - length, remaining) - return label + _MAXIMAL_OCTET * needed - - -def _absolute_predecessor(name: Name, origin: Name, prefix_ok: bool) -> Name: - # This is the RFC 4471 predecessor algorithm using the "absolute method" of section - # 3.1.1. - # - # Our caller must ensure that the name and origin are absolute, and that name is a - # subdomain of origin. - if name == origin: - return _pad_to_max_name(name) - least_significant_label = name[0] - if least_significant_label == _MINIMAL_OCTET: - return name.parent() - least_octet = least_significant_label[-1] - suffix_labels = name.labels[1:] - if least_octet == _MINIMAL_OCTET_VALUE: - new_labels = [least_significant_label[:-1]] - else: - octets = bytearray(least_significant_label) - octet = octets[-1] - if octet == _LEFT_SQUARE_BRACKET_VALUE: - octet = _AT_SIGN_VALUE - else: - octet -= 1 - octets[-1] = octet - least_significant_label = bytes(octets) - new_labels = [_pad_to_max_label(least_significant_label, suffix_labels)] - new_labels.extend(suffix_labels) - name = Name(new_labels) - if prefix_ok: - return _pad_to_max_name(name) - else: - return name - - -def _absolute_successor(name: Name, origin: Name, prefix_ok: bool) -> Name: - # This is the RFC 4471 successor algorithm using the "absolute method" of section - # 3.1.2. - # - # Our caller must ensure that the name and origin are absolute, and that name is a - # subdomain of origin. - if prefix_ok: - # Try prefixing \000 as new label - try: - return _SUCCESSOR_PREFIX.concatenate(name) - except NameTooLong: - pass - while name != origin: - # Try extending the least significant label. - least_significant_label = name[0] - if len(least_significant_label) < 63: - # We may be able to extend the least label with a minimal additional byte. - # This is only "may" because we could have a maximal length name even though - # the least significant label isn't maximally long. 
- new_labels = [least_significant_label + _MINIMAL_OCTET] - new_labels.extend(name.labels[1:]) - try: - return dns.name.Name(new_labels) - except dns.name.NameTooLong: - pass - # We can't extend the label either, so we'll try to increment the least - # signficant non-maximal byte in it. - octets = bytearray(least_significant_label) - # We do this reversed iteration with an explicit indexing variable because - # if we find something to increment, we're going to want to truncate everything - # to the right of it. - for i in range(len(octets) - 1, -1, -1): - octet = octets[i] - if octet == _MAXIMAL_OCTET_VALUE: - # We can't increment this, so keep looking. - continue - # Finally, something we can increment. We have to apply a special rule for - # incrementing "@", sending it to "[", because RFC 4034 6.1 says that when - # comparing names, uppercase letters compare as if they were their - # lower-case equivalents. If we increment "@" to "A", then it would compare - # as "a", which is after "[", "\", "]", "^", "_", and "`", so we would have - # skipped the most minimal successor, namely "[". - if octet == _AT_SIGN_VALUE: - octet = _LEFT_SQUARE_BRACKET_VALUE - else: - octet += 1 - octets[i] = octet - # We can now truncate all of the maximal values we skipped (if any) - new_labels = [bytes(octets[: i + 1])] - new_labels.extend(name.labels[1:]) - # We haven't changed the length of the name, so the Name constructor will - # always work. - return Name(new_labels) - # We couldn't increment, so chop off the least significant label and try - # again. - name = name.parent() - - # We couldn't increment at all, so return the origin, as wrapping around is the - # DNSSEC way. - return origin - - -def _handle_relativity_and_call( - function: Callable[[Name, Name, bool], Name], - name: Name, - origin: Name, - prefix_ok: bool, -) -> Name: - # Make "name" absolute if needed, ensure that the origin is absolute, - # call function(), and then relativize the result if needed. - if not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - relative = not name.is_absolute() - if relative: - name = name.derelativize(origin) - elif not name.is_subdomain(origin): - raise NeedSubdomainOfOrigin - result_name = function(name, origin, prefix_ok) - if relative: - result_name = result_name.relativize(origin) - return result_name diff --git a/venv/lib/python3.12/site-packages/dns/namedict.py b/venv/lib/python3.12/site-packages/dns/namedict.py deleted file mode 100644 index ca8b1978..00000000 --- a/venv/lib/python3.12/site-packages/dns/namedict.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# Copyright (C) 2016 Coresec Systems AB -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC -# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS name dictionary""" - -# pylint seems to be confused about this one! -from collections.abc import MutableMapping # pylint: disable=no-name-in-module - -import dns.name - - -class NameDict(MutableMapping): - """A dictionary whose keys are dns.name.Name objects. - - In addition to being like a regular Python dictionary, this - dictionary can also get the deepest match for a given key. - """ - - __slots__ = ["max_depth", "max_depth_items", "__store"] - - def __init__(self, *args, **kwargs): - super().__init__() - self.__store = dict() - #: the maximum depth of the keys that have ever been added - self.max_depth = 0 - #: the number of items of maximum depth - self.max_depth_items = 0 - self.update(dict(*args, **kwargs)) - - def __update_max_depth(self, key): - if len(key) == self.max_depth: - self.max_depth_items = self.max_depth_items + 1 - elif len(key) > self.max_depth: - self.max_depth = len(key) - self.max_depth_items = 1 - - def __getitem__(self, key): - return self.__store[key] - - def __setitem__(self, key, value): - if not isinstance(key, dns.name.Name): - raise ValueError("NameDict key must be a name") - self.__store[key] = value - self.__update_max_depth(key) - - def __delitem__(self, key): - self.__store.pop(key) - if len(key) == self.max_depth: - self.max_depth_items = self.max_depth_items - 1 - if self.max_depth_items == 0: - self.max_depth = 0 - for k in self.__store: - self.__update_max_depth(k) - - def __iter__(self): - return iter(self.__store) - - def __len__(self): - return len(self.__store) - - def has_key(self, key): - return key in self.__store - - def get_deepest_match(self, name): - """Find the deepest match to *name* in the dictionary. - - The deepest match is the longest name in the dictionary which is - a superdomain of *name*. Note that *superdomain* includes matching - *name* itself. - - *name*, a ``dns.name.Name``, the name to find. - - Returns a ``(key, value)`` where *key* is the deepest - ``dns.name.Name``, and *value* is the value associated with *key*. 
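# [Editor's note] A small sketch of get_deepest_match(), assuming dnspython's
# API; the longest stored superdomain of the lookup name wins.
import dns.name
import dns.namedict

d = dns.namedict.NameDict()
d[dns.name.from_text("example.")] = "apex"
d[dns.name.from_text("sub.example.")] = "delegation"
key, value = d.get_deepest_match(dns.name.from_text("www.sub.example."))
print(key, value)  # sub.example. delegation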
- """ - - depth = len(name) - if depth > self.max_depth: - depth = self.max_depth - for i in range(-depth, 0): - n = dns.name.Name(name[i:]) - if n in self: - return (n, self[n]) - v = self[dns.name.empty] - return (dns.name.empty, v) diff --git a/venv/lib/python3.12/site-packages/dns/nameserver.py b/venv/lib/python3.12/site-packages/dns/nameserver.py deleted file mode 100644 index b02a239b..00000000 --- a/venv/lib/python3.12/site-packages/dns/nameserver.py +++ /dev/null @@ -1,363 +0,0 @@ -from typing import Optional, Union -from urllib.parse import urlparse - -import dns.asyncbackend -import dns.asyncquery -import dns.inet -import dns.message -import dns.query - - -class Nameserver: - def __init__(self): - pass - - def __str__(self): - raise NotImplementedError - - def kind(self) -> str: - raise NotImplementedError - - def is_always_max_size(self) -> bool: - raise NotImplementedError - - def answer_nameserver(self) -> str: - raise NotImplementedError - - def answer_port(self) -> int: - raise NotImplementedError - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - raise NotImplementedError - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - raise NotImplementedError - - -class AddressAndPortNameserver(Nameserver): - def __init__(self, address: str, port: int): - super().__init__() - self.address = address - self.port = port - - def kind(self) -> str: - raise NotImplementedError - - def is_always_max_size(self) -> bool: - return False - - def __str__(self): - ns_kind = self.kind() - return f"{ns_kind}:{self.address}@{self.port}" - - def answer_nameserver(self) -> str: - return self.address - - def answer_port(self) -> int: - return self.port - - -class Do53Nameserver(AddressAndPortNameserver): - def __init__(self, address: str, port: int = 53): - super().__init__(address, port) - - def kind(self): - return "Do53" - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - if max_size: - response = dns.query.tcp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - else: - response = dns.query.udp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - raise_on_truncation=True, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ignore_errors=True, - ignore_unexpected=True, - ) - return response - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - if max_size: - response = await dns.asyncquery.tcp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - backend=backend, - one_rr_per_rrset=one_rr_per_rrset, - 
ignore_trailing=ignore_trailing, - ) - else: - response = await dns.asyncquery.udp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - raise_on_truncation=True, - backend=backend, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ignore_errors=True, - ignore_unexpected=True, - ) - return response - - -class DoHNameserver(Nameserver): - def __init__( - self, - url: str, - bootstrap_address: Optional[str] = None, - verify: Union[bool, str] = True, - want_get: bool = False, - http_version: dns.query.HTTPVersion = dns.query.HTTPVersion.DEFAULT, - ): - super().__init__() - self.url = url - self.bootstrap_address = bootstrap_address - self.verify = verify - self.want_get = want_get - self.http_version = http_version - - def kind(self): - return "DoH" - - def is_always_max_size(self) -> bool: - return True - - def __str__(self): - return self.url - - def answer_nameserver(self) -> str: - return self.url - - def answer_port(self) -> int: - port = urlparse(self.url).port - if port is None: - port = 443 - return port - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.https( - request, - self.url, - timeout=timeout, - source=source, - source_port=source_port, - bootstrap_address=self.bootstrap_address, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - post=(not self.want_get), - http_version=self.http_version, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.https( - request, - self.url, - timeout=timeout, - source=source, - source_port=source_port, - bootstrap_address=self.bootstrap_address, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - post=(not self.want_get), - http_version=self.http_version, - ) - - -class DoTNameserver(AddressAndPortNameserver): - def __init__( - self, - address: str, - port: int = 853, - hostname: Optional[str] = None, - verify: Union[bool, str] = True, - ): - super().__init__(address, port) - self.hostname = hostname - self.verify = verify - - def kind(self): - return "DoT" - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.tls( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - server_hostname=self.hostname, - verify=self.verify, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.tls( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - server_hostname=self.hostname, - 
verify=self.verify, - ) - - -class DoQNameserver(AddressAndPortNameserver): - def __init__( - self, - address: str, - port: int = 853, - verify: Union[bool, str] = True, - server_hostname: Optional[str] = None, - ): - super().__init__(address, port) - self.verify = verify - self.server_hostname = server_hostname - - def kind(self): - return "DoQ" - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.quic( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - server_hostname=self.server_hostname, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.quic( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - server_hostname=self.server_hostname, - ) diff --git a/venv/lib/python3.12/site-packages/dns/node.py b/venv/lib/python3.12/site-packages/dns/node.py deleted file mode 100644 index de85a82d..00000000 --- a/venv/lib/python3.12/site-packages/dns/node.py +++ /dev/null @@ -1,359 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS nodes. A node is a set of rdatasets.""" - -import enum -import io -from typing import Any, Dict, Optional - -import dns.immutable -import dns.name -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.renderer -import dns.rrset - -_cname_types = { - dns.rdatatype.CNAME, -} - -# "neutral" types can coexist with a CNAME and thus are not "other data" -_neutral_types = { - dns.rdatatype.NSEC, # RFC 4035 section 2.5 - dns.rdatatype.NSEC3, # This is not likely to happen, but not impossible! 
- dns.rdatatype.KEY, # RFC 4035 section 2.5, RFC 3007 -} - - -def _matches_type_or_its_signature(rdtypes, rdtype, covers): - return rdtype in rdtypes or (rdtype == dns.rdatatype.RRSIG and covers in rdtypes) - - -@enum.unique -class NodeKind(enum.Enum): - """Rdatasets in nodes""" - - REGULAR = 0 # a.k.a "other data" - NEUTRAL = 1 - CNAME = 2 - - @classmethod - def classify( - cls, rdtype: dns.rdatatype.RdataType, covers: dns.rdatatype.RdataType - ) -> "NodeKind": - if _matches_type_or_its_signature(_cname_types, rdtype, covers): - return NodeKind.CNAME - elif _matches_type_or_its_signature(_neutral_types, rdtype, covers): - return NodeKind.NEUTRAL - else: - return NodeKind.REGULAR - - @classmethod - def classify_rdataset(cls, rdataset: dns.rdataset.Rdataset) -> "NodeKind": - return cls.classify(rdataset.rdtype, rdataset.covers) - - -class Node: - """A Node is a set of rdatasets. - - A node is either a CNAME node or an "other data" node. A CNAME - node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their - covering RRSIG rdatasets. An "other data" node contains any - rdataset other than a CNAME or RRSIG(CNAME) rdataset. When - changes are made to a node, the CNAME or "other data" state is - always consistent with the update, i.e. the most recent change - wins. For example, if you have a node which contains a CNAME - rdataset, and then add an MX rdataset to it, then the CNAME - rdataset will be deleted. Likewise if you have a node containing - an MX rdataset and add a CNAME rdataset, the MX rdataset will be - deleted. - """ - - __slots__ = ["rdatasets"] - - def __init__(self): - # the set of rdatasets, represented as a list. - self.rdatasets = [] - - def to_text(self, name: dns.name.Name, **kw: Dict[str, Any]) -> str: - """Convert a node to text format. - - Each rdataset at the node is printed. Any keyword arguments - to this method are passed on to the rdataset's to_text() method. - - *name*, a ``dns.name.Name``, the owner name of the - rdatasets. - - Returns a ``str``. - - """ - - s = io.StringIO() - for rds in self.rdatasets: - if len(rds) > 0: - s.write(rds.to_text(name, **kw)) # type: ignore[arg-type] - s.write("\n") - return s.getvalue()[:-1] - - def __repr__(self): - return "<DNS node " + str(id(self)) + ">" - - def __eq__(self, other): - # - # This is inefficient. Good thing we don't need to do it much. - # - for rd in self.rdatasets: - if rd not in other.rdatasets: - return False - for rd in other.rdatasets: - if rd not in self.rdatasets: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def __len__(self): - return len(self.rdatasets) - - def __iter__(self): - return iter(self.rdatasets) - - def _append_rdataset(self, rdataset): - """Append rdataset to the node with special handling for CNAME and - other data conditions. - - Specifically, if the rdataset being appended has ``NodeKind.CNAME``, - then all rdatasets other than KEY, NSEC, NSEC3, and their covering - RRSIGs are deleted. If the rdataset being appended has - ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted. - """ - # Make having just one rdataset at the node fast.
- if len(self.rdatasets) > 0: - kind = NodeKind.classify_rdataset(rdataset) - if kind == NodeKind.CNAME: - self.rdatasets = [ - rds - for rds in self.rdatasets - if NodeKind.classify_rdataset(rds) != NodeKind.REGULAR - ] - elif kind == NodeKind.REGULAR: - self.rdatasets = [ - rds - for rds in self.rdatasets - if NodeKind.classify_rdataset(rds) != NodeKind.CNAME - ] - # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to - # edit self.rdatasets. - self.rdatasets.append(rdataset) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - """Find an rdataset matching the specified properties in the - current node. - - *rdclass*, a ``dns.rdataclass.RdataClass``, the class of the rdataset. - - *rdtype*, a ``dns.rdatatype.RdataType``, the type of the rdataset. - - *covers*, a ``dns.rdatatype.RdataType``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Raises ``KeyError`` if an rdataset of the desired type and class does - not exist and *create* is not ``True``. - - Returns a ``dns.rdataset.Rdataset``. - """ - - for rds in self.rdatasets: - if rds.match(rdclass, rdtype, covers): - return rds - if not create: - raise KeyError - rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) - self._append_rdataset(rds) - return rds - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - """Get an rdataset matching the specified properties in the - current node. - - None is returned if an rdataset of the specified type and - class does not exist and *create* is not ``True``. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. Usually this value is - dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or - dns.rdatatype.RRSIG, then the covers value will be the rdata - type the SIG/RRSIG covers. The library treats the SIG and RRSIG - types as if they were a family of - types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much - easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Returns a ``dns.rdataset.Rdataset`` or ``None``. - """ - - try: - rds = self.find_rdataset(rdclass, rdtype, covers, create) - except KeyError: - rds = None - return rds - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - """Delete the rdataset matching the specified properties in the - current node. - - If a matching rdataset does not exist, it is not an error. - - *rdclass*, an ``int``, the class of the rdataset. 
- - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. - """ - - rds = self.get_rdataset(rdclass, rdtype, covers) - if rds is not None: - self.rdatasets.remove(rds) - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - """Replace an rdataset. - - It is not an error if there is no rdataset matching *replacement*. - - Ownership of the *replacement* object is transferred to the node; - in other words, this method does not store a copy of *replacement* - at the node, it stores *replacement* itself. - - *replacement*, a ``dns.rdataset.Rdataset``. - - Raises ``ValueError`` if *replacement* is not a - ``dns.rdataset.Rdataset``. - """ - - if not isinstance(replacement, dns.rdataset.Rdataset): - raise ValueError("replacement is not an rdataset") - if isinstance(replacement, dns.rrset.RRset): - # RRsets are not good replacements as the match() method - # is not compatible. - replacement = replacement.to_rdataset() - self.delete_rdataset( - replacement.rdclass, replacement.rdtype, replacement.covers - ) - self._append_rdataset(replacement) - - def classify(self) -> NodeKind: - """Classify a node. - - A node which contains a CNAME or RRSIG(CNAME) is a - ``NodeKind.CNAME`` node. - - A node which contains only "neutral" types, i.e. types allowed to - co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral - types are NSEC, NSEC3, KEY, and their associated RRSIGs. An empty node - is also considered neutral. - - A node which contains some rdataset which is not a CNAME, RRSIG(CNAME), - or a neutral type is a ``NodeKind.REGULAR`` node. Regular nodes are - also commonly referred to as "other data". - """ - for rdataset in self.rdatasets: - kind = NodeKind.classify(rdataset.rdtype, rdataset.covers) - if kind != NodeKind.NEUTRAL: - return kind - return NodeKind.NEUTRAL - - def is_immutable(self) -> bool: - return False - - -@dns.immutable.immutable -class ImmutableNode(Node): - def __init__(self, node): - super().__init__() - self.rdatasets = tuple( - [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] - ) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise TypeError("immutable") - return super().find_rdataset(rdclass, rdtype, covers, False) - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise TypeError("immutable") - return super().get_rdataset(rdclass, rdtype, covers, False) - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - raise TypeError("immutable") - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - raise TypeError("immutable") - - def is_immutable(self) -> bool: - return True diff --git a/venv/lib/python3.12/site-packages/dns/opcode.py b/venv/lib/python3.12/site-packages/dns/opcode.py deleted file mode 100644 index 78b43d2c..00000000 --- a/venv/lib/python3.12/site-packages/dns/opcode.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc.
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Opcodes.""" - -import dns.enum -import dns.exception - - -class Opcode(dns.enum.IntEnum): - #: Query - QUERY = 0 - #: Inverse Query (historical) - IQUERY = 1 - #: Server Status (unspecified and unimplemented anywhere) - STATUS = 2 - #: Notify - NOTIFY = 4 - #: Dynamic Update - UPDATE = 5 - - @classmethod - def _maximum(cls): - return 15 - - @classmethod - def _unknown_exception_class(cls): - return UnknownOpcode - - -class UnknownOpcode(dns.exception.DNSException): - """A DNS opcode is unknown.""" - - -def from_text(text: str) -> Opcode: - """Convert text into an opcode. - - *text*, a ``str``, the textual opcode - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. - - Returns an ``int``. - """ - - return Opcode.from_text(text) - - -def from_flags(flags: int) -> Opcode: - """Extract an opcode from DNS message flags. - - *flags*, an ``int``, the DNS flags. - - Returns an ``int``. - """ - - return Opcode((flags & 0x7800) >> 11) - - -def to_flags(value: Opcode) -> int: - """Convert an opcode to a value suitable for ORing into DNS message - flags. - - *value*, an ``int``, the DNS opcode value. - - Returns an ``int``. - """ - - return (value << 11) & 0x7800 - - -def to_text(value: Opcode) -> str: - """Convert an opcode to text. - - *value*, an ``int``, the opcode value. - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. - - Returns a ``str``. - """ - - return Opcode.to_text(value) - - -def is_update(flags: int) -> bool: - """Is the opcode in flags UPDATE? - - *flags*, an ``int``, the DNS message flags. - - Returns a ``bool``. - """ - - return from_flags(flags) == Opcode.UPDATE - - -### BEGIN generated Opcode constants - -QUERY = Opcode.QUERY -IQUERY = Opcode.IQUERY -STATUS = Opcode.STATUS -NOTIFY = Opcode.NOTIFY -UPDATE = Opcode.UPDATE - -### END generated Opcode constants diff --git a/venv/lib/python3.12/site-packages/dns/py.typed b/venv/lib/python3.12/site-packages/dns/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/dns/query.py b/venv/lib/python3.12/site-packages/dns/query.py deleted file mode 100644 index 0d8a977a..00000000 --- a/venv/lib/python3.12/site-packages/dns/query.py +++ /dev/null @@ -1,1665 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Talk to a DNS server.""" - -import base64 -import contextlib -import enum -import errno -import os -import os.path -import random -import selectors -import socket -import struct -import time -import urllib.parse -from typing import Any, Dict, Optional, Tuple, Union, cast - -import dns._features -import dns.exception -import dns.inet -import dns.message -import dns.name -import dns.quic -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.serial -import dns.transaction -import dns.tsig -import dns.xfr - - -def _remaining(expiration): - if expiration is None: - return None - timeout = expiration - time.time() - if timeout <= 0.0: - raise dns.exception.Timeout - return timeout - - -def _expiration_for_this_attempt(timeout, expiration): - if expiration is None: - return None - return min(time.time() + timeout, expiration) - - -_have_httpx = dns._features.have("doh") -if _have_httpx: - import httpcore._backends.sync - import httpx - - _CoreNetworkBackend = httpcore.NetworkBackend - _CoreSyncStream = httpcore._backends.sync.SyncStream - - class _NetworkBackend(_CoreNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - - def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - af = dns.inet.af_for_address(address) - if local_address is not None or self._local_port != 0: - source = dns.inet.low_level_address_tuple( - (local_address, self._local_port), af - ) - else: - source = None - sock = _make_socket(af, socket.SOCK_STREAM, source) - attempt_expiration = _expiration_for_this_attempt(2.0, expiration) - try: - _connect( - sock, - dns.inet.low_level_address_tuple((address, port), af), - attempt_expiration, - ) - return _CoreSyncStream(sock) - except Exception: - pass - raise httpcore.ConnectError - - def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - class _HTTPTransport(httpx.HTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None and bootstrap_address is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.resolver - - resolver = dns.resolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -else: - - class _HTTPTransport: # type: ignore - def connect_tcp(self, host, port, timeout, 
local_address): - raise NotImplementedError - - -have_doh = _have_httpx - -try: - import ssl -except ImportError: # pragma: no cover - - class ssl: # type: ignore - CERT_NONE = 0 - - class WantReadException(Exception): - pass - - class WantWriteException(Exception): - pass - - class SSLContext: - pass - - class SSLSocket: - pass - - @classmethod - def create_default_context(cls, *args, **kwargs): - raise Exception("no ssl support") # pylint: disable=broad-exception-raised - - -# Function used to create a socket. Can be overridden if needed in special -# situations. -socket_factory = socket.socket - - -class UnexpectedSource(dns.exception.DNSException): - """A DNS query response came from an unexpected address or port.""" - - -class BadResponse(dns.exception.FormError): - """A DNS query response does not respond to the question asked.""" - - -class NoDOH(dns.exception.DNSException): - """DNS over HTTPS (DOH) was requested but the httpx module is not - available.""" - - -class NoDOQ(dns.exception.DNSException): - """DNS over QUIC (DOQ) was requested but the aioquic module is not - available.""" - - -# for backwards compatibility -TransferError = dns.xfr.TransferError - - -def _compute_times(timeout): - now = time.time() - if timeout is None: - return (now, None) - else: - return (now, now + timeout) - - -def _wait_for(fd, readable, writable, _, expiration): - # Use the selected selector class to wait for any of the specified - # events. An "expiration" absolute time is converted into a relative - # timeout. - # - # The unused parameter is 'error', which is always set when - # selecting for read or write, and we have no error-only selects. - - if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: - return True - sel = selectors.DefaultSelector() - events = 0 - if readable: - events |= selectors.EVENT_READ - if writable: - events |= selectors.EVENT_WRITE - if events: - sel.register(fd, events) - if expiration is None: - timeout = None - else: - timeout = expiration - time.time() - if timeout <= 0.0: - raise dns.exception.Timeout - if not sel.select(timeout): - raise dns.exception.Timeout - - -def _wait_for_readable(s, expiration): - _wait_for(s, True, False, True, expiration) - - -def _wait_for_writable(s, expiration): - _wait_for(s, False, True, True, expiration) - - -def _addresses_equal(af, a1, a2): - # Convert the first value of the tuple, which is a textual format - # address into binary form, so that we are not confused by different - # textual representations of the same address - try: - n1 = dns.inet.inet_pton(af, a1[0]) - n2 = dns.inet.inet_pton(af, a2[0]) - except dns.exception.SyntaxError: - return False - return n1 == n2 and a1[1:] == a2[1:] - - -def _matches_destination(af, from_address, destination, ignore_unexpected): - # Check that from_address is appropriate for a response to a query - # sent to destination. - if not destination: - return True - if _addresses_equal(af, from_address, destination) or ( - dns.inet.is_multicast(destination[0]) and from_address[1:] == destination[1:] - ): - return True - elif ignore_unexpected: - return False - raise UnexpectedSource( - f"got a response from {from_address} instead of " f"{destination}" - ) - - -def _destination_and_source( - where, port, source, source_port, where_must_be_address=True -): - # Apply defaults and compute destination and source tuples - # suitable for use in connect(), sendto(), or bind(). 
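    # Editor's note (illustrative, not part of the original file; values are
    # placeholders): for an IPv6 destination,
    #   _destination_and_source("2001:db8::1", 53, None, 0)
    # is expected to return roughly
    #   (socket.AF_INET6, ("2001:db8::1", 53, 0, 0), None),
    # while specifying a source_port without a source forces the appropriate
    # wildcard address to be used as the source.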
- af = None - destination = None - try: - af = dns.inet.af_for_address(where) - destination = where - except Exception: - if where_must_be_address: - raise - # URLs are ok so eat the exception - if source: - saf = dns.inet.af_for_address(source) - if af: - # We know the destination af, so source had better agree! - if saf != af: - raise ValueError( - "different address families for source and destination" - ) - else: - # We didn't know the destination af, but we know the source, - # so that's our af. - af = saf - if source_port and not source: - # Caller has specified a source_port but not an address, so we - # need to return a source, and we need to use the appropriate - # wildcard address as the address. - try: - source = dns.inet.any_for_af(af) - except Exception: - # we catch this and raise ValueError for backwards compatibility - raise ValueError("source_port specified but address family is unknown") - # Convert high-level (address, port) tuples into low-level address - # tuples. - if destination: - destination = dns.inet.low_level_address_tuple((destination, port), af) - if source: - source = dns.inet.low_level_address_tuple((source, source_port), af) - return (af, destination, source) - - -def _make_socket(af, type, source, ssl_context=None, server_hostname=None): - s = socket_factory(af, type) - try: - s.setblocking(False) - if source is not None: - s.bind(source) - if ssl_context: - # LGTM gets a false positive here, as our default context is OK - return ssl_context.wrap_socket( - s, - do_handshake_on_connect=False, # lgtm[py/insecure-protocol] - server_hostname=server_hostname, - ) - else: - return s - except Exception: - s.close() - raise - - -def _maybe_get_resolver( - resolver: Optional["dns.resolver.Resolver"], -) -> "dns.resolver.Resolver": - # We need a separate method for this to avoid overriding the global - # variable "dns" with the as-yet undefined local variable "dns" - # in https(). - if resolver is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.resolver - - resolver = dns.resolver.Resolver() - return resolver - - -class HTTPVersion(enum.IntEnum): - """Which version of HTTP should be used? - - DEFAULT will select the first version from the list [2, 1.1, 3] that - is available. - """ - - DEFAULT = 0 - HTTP_1 = 1 - H1 = 1 - HTTP_2 = 2 - H2 = 2 - HTTP_3 = 3 - H3 = 3 - - -def https( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 443, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - session: Optional[Any] = None, - path: str = "/dns-query", - post: bool = True, - bootstrap_address: Optional[str] = None, - verify: Union[bool, str] = True, - resolver: Optional["dns.resolver.Resolver"] = None, - family: int = socket.AF_UNSPEC, - http_version: HTTPVersion = HTTPVersion.DEFAULT, -) -> dns.message.Message: - """Return the response obtained after sending a query via DNS-over-HTTPS. - - *q*, a ``dns.message.Message``, the query to send. - - *where*, a ``str``, the nameserver IP address or the full URL. If an IP address is - given, the URL will be constructed using the following schema: - https://<IP-address>:<port>/<path>. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query - times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port to send the query to. The default is 443. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source - address.
The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. The default is - 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the - received message. - - *session*, an ``httpx.Client``. If provided, the client session to use to send the - queries. - - *path*, a ``str``. If *where* is an IP address, then *path* will be used to - construct the URL to send the DNS query to. - - *post*, a ``bool``. If ``True``, the default, POST method will be used. - - *bootstrap_address*, a ``str``, the IP address to use to bypass resolution. - - *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification - of the server is done using the default CA bundle; if ``False``, then no - verification is done; if a `str` then it specifies the path to a certificate file or - directory which will be used for verification. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for - resolution of hostnames in URLs. If not specified, a new resolver with a default - configuration will be used; note this is *not* the default resolver as that resolver - might have been configured to use DoH causing a chicken-and-egg problem. This - parameter only has an effect if the HTTP library is httpx. - - *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A - and AAAA records will be retrieved. - - *http_version*, a ``dns.query.HTTPVersion``, indicating which HTTP version to use. - - Returns a ``dns.message.Message``. - """ - - (af, _, the_source) = _destination_and_source( - where, port, source, source_port, False - ) - if af is not None and dns.inet.is_address(where): - if af == socket.AF_INET: - url = f"https://{where}:{port}{path}" - elif af == socket.AF_INET6: - url = f"https://[{where}]:{port}{path}" - else: - url = where - - extensions = {} - if bootstrap_address is None: - # pylint: disable=possibly-used-before-assignment - parsed = urllib.parse.urlparse(url) - if parsed.hostname is None: - raise ValueError("no hostname in URL") - if dns.inet.is_address(parsed.hostname): - bootstrap_address = parsed.hostname - extensions["sni_hostname"] = parsed.hostname - if parsed.port is not None: - port = parsed.port - - if http_version == HTTPVersion.H3 or ( - http_version == HTTPVersion.DEFAULT and not have_doh - ): - if bootstrap_address is None: - resolver = _maybe_get_resolver(resolver) - assert parsed.hostname is not None # for mypy - answers = resolver.resolve_name(parsed.hostname, family) - bootstrap_address = random.choice(list(answers.addresses())) - return _http3( - q, - bootstrap_address, - url, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - verify=verify, - post=post, - ) - - if not have_doh: - raise NoDOH # pragma: no cover - if session and not isinstance(session, httpx.Client): - raise ValueError("session parameter must be an httpx.Client") - - wire = q.to_wire() - headers = {"accept": "application/dns-message"} - - h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) - h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) - - # set source port and source address - - if the_source is None: - local_address = None - local_port = 0 - else: - local_address = the_source[0] - local_port = the_source[1] - - if session: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(session) - else: - transport = _HTTPTransport( - 
local_address=local_address, - http1=h1, - http2=h2, - verify=verify, - local_port=local_port, - bootstrap_address=bootstrap_address, - resolver=resolver, - family=family, - ) - - cm = httpx.Client(http1=h1, http2=h2, verify=verify, transport=transport) - with cm as session: - # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH - # GET and POST examples - if post: - headers.update( - { - "content-type": "application/dns-message", - "content-length": str(len(wire)), - } - ) - response = session.post( - url, - headers=headers, - content=wire, - timeout=timeout, - extensions=extensions, - ) - else: - wire = base64.urlsafe_b64encode(wire).rstrip(b"=") - twire = wire.decode() # httpx does a repr() if we give it bytes - response = session.get( - url, - headers=headers, - timeout=timeout, - params={"dns": twire}, - extensions=extensions, - ) - - # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH - # status codes - if response.status_code < 200 or response.status_code > 299: - raise ValueError( - f"{where} responded with status code {response.status_code}" - f"\nResponse body: {response.content}" - ) - r = dns.message.from_wire( - response.content, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = response.elapsed.total_seconds() - if not q.is_response(r): - raise BadResponse - return r - - -def _find_header(headers: dns.quic.Headers, name: bytes) -> bytes: - if headers is None: - raise KeyError - for header, value in headers: - if header == name: - return value - raise KeyError - - -def _check_status(headers: dns.quic.Headers, peer: str, wire: bytes) -> None: - value = _find_header(headers, b":status") - if value is None: - raise SyntaxError("no :status header in response") - status = int(value) - if status < 0: - raise SyntaxError("status is negative") - if status < 200 or status > 299: - error = "" - if len(wire) > 0: - try: - error = ": " + wire.decode() - except Exception: - pass - raise ValueError(f"{peer} responded with status code {status}{error}") - - -def _http3( - q: dns.message.Message, - where: str, - url: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - verify: Union[bool, str] = True, - hostname: Optional[str] = None, - post: bool = True, -) -> dns.message.Message: - if not dns.quic.have_quic: - raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover - - url_parts = urllib.parse.urlparse(url) - hostname = url_parts.hostname - if url_parts.port is not None: - port = url_parts.port - - q.id = 0 - wire = q.to_wire() - manager = dns.quic.SyncQuicManager( - verify_mode=verify, server_name=hostname, h3=True - ) - - with manager: - connection = manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - with connection.make_stream(timeout) as stream: - stream.send_h3(url, wire, post) - wire = stream.receive(_remaining(expiration)) - _check_status(stream.headers(), where, wire) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r - - -def _udp_recv(sock, max_size, expiration): - """Reads a datagram from the socket. 
- A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - while True: - try: - return sock.recvfrom(max_size) - except BlockingIOError: - _wait_for_readable(sock, expiration) - - -def _udp_send(sock, data, destination, expiration): - """Sends the specified datagram to destination over the socket. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - while True: - try: - if destination: - return sock.sendto(data, destination) - else: - return sock.send(data) - except BlockingIOError: # pragma: no cover - _wait_for_writable(sock, expiration) - - -def send_udp( - sock: Any, - what: Union[dns.message.Message, bytes], - destination: Any, - expiration: Optional[float] = None, -) -> Tuple[int, float]: - """Send a DNS message to the specified UDP socket. - - *sock*, a ``socket``. - - *what*, a ``bytes`` or ``dns.message.Message``, the message to send. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where to send the query. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - what = what.to_wire() - sent_time = time.time() - n = _udp_send(sock, what, destination, expiration) - return (n, sent_time) - - -def receive_udp( - sock: Any, - destination: Optional[Any] = None, - expiration: Optional[float] = None, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - ignore_errors: bool = False, - query: Optional[dns.message.Message] = None, -) -> Any: - """Read a DNS message from a UDP socket. - - *sock*, a ``socket``. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where the message is expected to arrive from. - When receiving a response, this would be where the associated query was - sent. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from - unexpected sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if - the TC bit is set. - - Raises if the message is malformed, if network errors occur, or if - there is a timeout. - - If *destination* is not ``None``, returns a ``(dns.message.Message, float)`` - tuple of the received message and the received time. - - If *destination* is ``None``, returns a - ``(dns.message.Message, float, tuple)`` - tuple of the received message, the received time, and the address where - the message arrived from. - - *ignore_errors*, a ``bool``. If various format errors or response - mismatches occur, ignore them and keep listening for a valid response. - The default is ``False``. - - *query*, a ``dns.message.Message`` or ``None``.
If not ``None`` and - *ignore_errors* is ``True``, check that the received message is a response - to this query, and if not keep listening for a valid response. - """ - - wire = b"" - while True: - (wire, from_address) = _udp_recv(sock, 65535, expiration) - if not _matches_destination( - sock.family, from_address, destination, ignore_unexpected - ): - continue - received_time = time.time() - try: - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - raise_on_truncation=raise_on_truncation, - ) - except dns.message.Truncated as e: - # If we got Truncated and not FORMERR, we at least got the header with TC - # set, and very likely the question section, so we'll re-raise if the - # message seems to be a response as we need to know when truncation happens. - # We need to check that it seems to be a response as we don't want a random - # injected message with TC set to cause us to bail out. - if ( - ignore_errors - and query is not None - and not query.is_response(e.message()) - ): - continue - else: - raise - except Exception: - if ignore_errors: - continue - else: - raise - if ignore_errors and query is not None and not query.is_response(r): - continue - if destination: - return (r, received_time) - else: - return (r, received_time, from_address) - - -def udp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - sock: Optional[Any] = None, - ignore_errors: bool = False, -) -> dns.message.Message: - """Return the response obtained after sending a query via UDP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from - unexpected sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if - the TC bit is set. - - *sock*, a ``socket.socket``, or ``None``, the socket to use for the - query. If ``None``, the default, a socket is created. Note that - if a socket is provided, it must be a nonblocking datagram socket, - and the *source* and *source_port* are ignored. - - *ignore_errors*, a ``bool``. If various format errors or response - mismatches occur, ignore them and keep listening for a valid response. - The default is ``False``. - - Returns a ``dns.message.Message``. 
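    Example (editor's illustrative sketch, not part of the original source; the
    server address 192.0.2.53 is a placeholder)::

        import dns.message
        import dns.query

        q = dns.message.make_query("example.com.", "A")
        r = dns.query.udp(q, "192.0.2.53", timeout=2.0)
        for rrset in r.answer:
            print(rrset)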
- """ - - wire = q.to_wire() - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) - else: - cm = _make_socket(af, socket.SOCK_DGRAM, source) - with cm as s: - send_udp(s, wire, destination, expiration) - (r, received_time) = receive_udp( - s, - destination, - expiration, - ignore_unexpected, - one_rr_per_rrset, - q.keyring, - q.mac, - ignore_trailing, - raise_on_truncation, - ignore_errors, - q, - ) - r.time = received_time - begin_time - # We don't need to check q.is_response() if we are in ignore_errors mode - # as receive_udp() will have checked it. - if not (ignore_errors or q.is_response(r)): - raise BadResponse - return r - assert ( - False # help mypy figure out we can't get here lgtm[py/unreachable-statement] - ) - - -def udp_with_fallback( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - udp_sock: Optional[Any] = None, - tcp_sock: Optional[Any] = None, - ignore_errors: bool = False, -) -> Tuple[dns.message.Message, bool]: - """Return the response to the query, trying UDP first and falling back - to TCP if UDP results in a truncated response. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query - times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source - address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. The default is - 0. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from unexpected - sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the - received message. - - *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the UDP query. - If ``None``, the default, a socket is created. Note that if a socket is provided, - it must be a nonblocking datagram socket, and the *source* and *source_port* are - ignored for the UDP query. - - *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the - TCP query. If ``None``, the default, a socket is created. Note that if a socket is - provided, it must be a nonblocking connected stream socket, and *where*, *source* - and *source_port* are ignored for the TCP query. - - *ignore_errors*, a ``bool``. If various format errors or response mismatches occur - while listening for UDP, ignore them and keep listening for a valid response. The - default is ``False``. - - Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` if and only if - TCP was used. 
- """ - try: - response = udp( - q, - where, - timeout, - port, - source, - source_port, - ignore_unexpected, - one_rr_per_rrset, - ignore_trailing, - True, - udp_sock, - ignore_errors, - ) - return (response, False) - except dns.message.Truncated: - response = tcp( - q, - where, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - tcp_sock, - ) - return (response, True) - - -def _net_read(sock, count, expiration): - """Read the specified number of bytes from sock. Keep trying until we - either get the desired amount, or we hit EOF. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - s = b"" - while count > 0: - try: - n = sock.recv(count) - if n == b"": - raise EOFError("EOF") - count -= len(n) - s += n - except (BlockingIOError, ssl.SSLWantReadError): - _wait_for_readable(sock, expiration) - except ssl.SSLWantWriteError: # pragma: no cover - _wait_for_writable(sock, expiration) - return s - - -def _net_write(sock, data, expiration): - """Write the specified data to the socket. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - current = 0 - l = len(data) - while current < l: - try: - current += sock.send(data[current:]) - except (BlockingIOError, ssl.SSLWantWriteError): - _wait_for_writable(sock, expiration) - except ssl.SSLWantReadError: # pragma: no cover - _wait_for_readable(sock, expiration) - - -def send_tcp( - sock: Any, - what: Union[dns.message.Message, bytes], - expiration: Optional[float] = None, -) -> Tuple[int, float]: - """Send a DNS message to the specified TCP socket. - - *sock*, a ``socket``. - - *what*, a ``bytes`` or ``dns.message.Message``, the message to send. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - tcpmsg = what.to_wire(prepend_length=True) - else: - # copying the wire into tcpmsg is inefficient, but lets us - # avoid writev() or doing a short write that would get pushed - # onto the net - tcpmsg = len(what).to_bytes(2, "big") + what - sent_time = time.time() - _net_write(sock, tcpmsg, expiration) - return (len(tcpmsg), sent_time) - - -def receive_tcp( - sock: Any, - expiration: Optional[float] = None, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, -) -> Tuple[dns.message.Message, float]: - """Read a DNS message from a TCP socket. - - *sock*, a ``socket``. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Raises if the message is malformed, if network errors occur, of if - there is a timeout. - - Returns a ``(dns.message.Message, float)`` tuple of the received message - and the received time. 
- """ - - ldata = _net_read(sock, 2, expiration) - (l,) = struct.unpack("!H", ldata) - wire = _net_read(sock, l, expiration) - received_time = time.time() - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - return (r, received_time) - - -def _connect(s, address, expiration): - err = s.connect_ex(address) - if err == 0: - return - if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): - _wait_for_writable(s, expiration) - err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) - if err != 0: - raise OSError(err, os.strerror(err)) - - -def tcp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[Any] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via TCP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the - query. If ``None``, the default, a socket is created. Note that - if a socket is provided, it must be a nonblocking connected stream - socket, and *where*, *port*, *source* and *source_port* are ignored. - - Returns a ``dns.message.Message``. 
- """ - - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) - else: - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - cm = _make_socket(af, socket.SOCK_STREAM, source) - with cm as s: - if not sock: - # pylint: disable=possibly-used-before-assignment - _connect(s, destination, expiration) - send_tcp(s, wire, expiration) - (r, received_time) = receive_tcp( - s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - assert ( - False # help mypy figure out we can't get here lgtm[py/unreachable-statement] - ) - - -def _tls_handshake(s, expiration): - while True: - try: - s.do_handshake() - return - except ssl.SSLWantReadError: - _wait_for_readable(s, expiration) - except ssl.SSLWantWriteError: # pragma: no cover - _wait_for_writable(s, expiration) - - -def _make_dot_ssl_context( - server_hostname: Optional[str], verify: Union[bool, str] -) -> ssl.SSLContext: - cafile: Optional[str] = None - capath: Optional[str] = None - if isinstance(verify, str): - if os.path.isfile(verify): - cafile = verify - elif os.path.isdir(verify): - capath = verify - else: - raise ValueError("invalid verify string") - ssl_context = ssl.create_default_context(cafile=cafile, capath=capath) - ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 - if server_hostname is None: - ssl_context.check_hostname = False - ssl_context.set_alpn_protocols(["dot"]) - if verify is False: - ssl_context.verify_mode = ssl.CERT_NONE - return ssl_context - - -def tls( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[ssl.SSLSocket] = None, - ssl_context: Optional[ssl.SSLContext] = None, - server_hostname: Optional[str] = None, - verify: Union[bool, str] = True, -) -> dns.message.Message: - """Return the response obtained after sending a query via TLS. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 853. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for - the query. If ``None``, the default, a socket is created. Note - that if a socket is provided, it must be a nonblocking connected - SSL stream socket, and *where*, *port*, *source*, *source_port*, - and *ssl_context* are ignored. - - *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing - a TLS connection. If ``None``, the default, creates one with the default - configuration. - - *server_hostname*, a ``str`` containing the server's hostname. 
The - default is ``None``, which means that no hostname is known, and if an - SSL context is created, hostname checking will be disabled. - - *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification - of the server is done using the default CA bundle; if ``False``, then no - verification is done; if a `str` then it specifies the path to a certificate file or - directory which will be used for verification. - - Returns a ``dns.message.Message``. - - """ - - if sock: - # - # If a socket was provided, there's no special TLS handling needed. - # - return tcp( - q, - where, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - sock, - ) - - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - if ssl_context is None and not sock: - ssl_context = _make_dot_ssl_context(server_hostname, verify) - - with _make_socket( - af, - socket.SOCK_STREAM, - source, - ssl_context=ssl_context, - server_hostname=server_hostname, - ) as s: - _connect(s, destination, expiration) - _tls_handshake(s, expiration) - send_tcp(s, wire, expiration) - (r, received_time) = receive_tcp( - s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - assert ( - False # help mypy figure out we can't get here lgtm[py/unreachable-statement] - ) - - -def quic( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - connection: Optional[dns.quic.SyncQuicConnection] = None, - verify: Union[bool, str] = True, - hostname: Optional[str] = None, - server_hostname: Optional[str] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via DNS-over-QUIC. - - *q*, a ``dns.message.Message``, the query to send. - - *where*, a ``str``, the nameserver IP address. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query - times out. If ``None``, the default, wait forever. - - *port*, a ``int``, the port to send the query to. The default is 853. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source - address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. The default is - 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the - received message. - - *connection*, a ``dns.quic.SyncQuicConnection``. If provided, the connection to use - to send the query. - - *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification - of the server is done using the default CA bundle; if ``False``, then no - verification is done; if a `str` then it specifies the path to a certificate file or - directory which will be used for verification. - - *hostname*, a ``str`` containing the server's hostname or ``None``. The default is - ``None``, which means that no hostname is known, and if an SSL context is created, - hostname checking will be disabled. This value is ignored if *url* is not - ``None``. - - *server_hostname*, a ``str`` or ``None``. This item is for backwards compatibility - only, and has the same meaning as *hostname*. 
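    Example (editor's illustrative sketch, not part of the original source; DoQ
    needs the optional aioquic dependency, and both the address and hostname
    below are placeholders)::

        q = dns.message.make_query("example.com.", "A")
        r = dns.query.quic(q, "192.0.2.1", hostname="doq.example.net")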
- - Returns a ``dns.message.Message``. - """ - - if not dns.quic.have_quic: - raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover - - if server_hostname is not None and hostname is None: - hostname = server_hostname - - q.id = 0 - wire = q.to_wire() - the_connection: dns.quic.SyncQuicConnection - the_manager: dns.quic.SyncQuicManager - if connection: - manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) - the_connection = connection - else: - manager = dns.quic.SyncQuicManager(verify_mode=verify, server_name=hostname) - the_manager = manager # for type checking happiness - - with manager: - if not connection: - the_connection = the_manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - with the_connection.make_stream(timeout) as stream: - stream.send(wire, True) - wire = stream.receive(_remaining(expiration)) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r - - -class UDPMode(enum.IntEnum): - """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`? - - NEVER means "never use UDP; always use TCP" - TRY_FIRST means "try to use UDP but fall back to TCP if needed" - ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed" - """ - - NEVER = 0 - TRY_FIRST = 1 - ONLY = 2 - - -def _inbound_xfr( - txn_manager: dns.transaction.TransactionManager, - s: socket.socket, - query: dns.message.Message, - serial: Optional[int], - timeout: Optional[float], - expiration: float, -) -> Any: - """Given a socket, does the zone transfer.""" - rdtype = query.question[0].rdtype - is_ixfr = rdtype == dns.rdatatype.IXFR - origin = txn_manager.from_wire_origin() - wire = query.to_wire() - is_udp = s.type == socket.SOCK_DGRAM - if is_udp: - _udp_send(s, wire, None, expiration) - else: - tcpmsg = struct.pack("!H", len(wire)) + wire - _net_write(s, tcpmsg, expiration) - with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: - done = False - tsig_ctx = None - while not done: - (_, mexpiration) = _compute_times(timeout) - if mexpiration is None or ( - expiration is not None and mexpiration > expiration - ): - mexpiration = expiration - if is_udp: - (rwire, _) = _udp_recv(s, 65535, mexpiration) - else: - ldata = _net_read(s, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - rwire = _net_read(s, l, mexpiration) - r = dns.message.from_wire( - rwire, - keyring=query.keyring, - request_mac=query.mac, - xfr=True, - origin=origin, - tsig_ctx=tsig_ctx, - multi=(not is_udp), - one_rr_per_rrset=is_ixfr, - ) - done = inbound.process_message(r) - yield r - tsig_ctx = r.tsig_ctx - if query.keyring and not r.had_tsig: - raise dns.exception.FormError("missing TSIG") - - -def xfr( - where: str, - zone: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.AXFR, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - timeout: Optional[float] = None, - port: int = 53, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - keyname: Optional[Union[dns.name.Name, str]] = None, - relativize: bool = True, - lifetime: Optional[float] = None, - source: Optional[str] = None, - source_port: int = 0, - serial: int = 0, - use_udp: bool = False, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, -) -> Any: - """Return a 
generator for the responses to a zone transfer.
-
-    *where*, a ``str`` containing an IPv4 or IPv6 address, where
-    to send the message.
-
-    *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer.
-
-    *rdtype*, an ``int`` or ``str``, the type of zone transfer. The
-    default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
-    used to do an incremental transfer instead.
-
-    *rdclass*, an ``int`` or ``str``, the class of the zone transfer.
-    The default is ``dns.rdataclass.IN``.
-
-    *timeout*, a ``float``, the number of seconds to wait for each
-    response message. If None, the default, wait forever.
-
-    *port*, an ``int``, the port to send the message to. The default is 53.
-
-    *keyring*, a ``dict``, the keyring to use for TSIG.
-
-    *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG
-    key to use.
-
-    *relativize*, a ``bool``. If ``True``, all names in the zone will be
-    relativized to the zone origin. It is essential that the
-    relativize setting matches the one specified to
-    ``dns.zone.from_xfr()`` if using this generator to make a zone.
-
-    *lifetime*, a ``float``, the total number of seconds to spend
-    doing the transfer. If ``None``, the default, then there is no
-    limit on the time the transfer may take.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
-    the source address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message.
-    The default is 0.
-
-    *serial*, an ``int``, the SOA serial number to use as the base for
-    an IXFR diff sequence (only meaningful if *rdtype* is
-    ``dns.rdatatype.IXFR``).
-
-    *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR).
-
-    *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use.
-
-    Raises on errors, and so does the generator.
-
-    Returns a generator of ``dns.message.Message`` objects.
-    """
-
-    class DummyTransactionManager(dns.transaction.TransactionManager):
-        def __init__(self, origin, relativize):
-            self.info = (origin, relativize, dns.name.empty if relativize else origin)
-
-        def origin_information(self):
-            return self.info
-
-        def get_class(self) -> dns.rdataclass.RdataClass:
-            raise NotImplementedError  # pragma: no cover
-
-        def reader(self):
-            raise NotImplementedError  # pragma: no cover
-
-        def writer(self, replacement: bool = False) -> dns.transaction.Transaction:
-            class DummyTransaction:
-                def nop(self, *args, **kw):
-                    pass
-
-                def __getattr__(self, _):
-                    return self.nop
-
-            return cast(dns.transaction.Transaction, DummyTransaction())
-
-    if isinstance(zone, str):
-        zone = dns.name.from_text(zone)
-    rdtype = dns.rdatatype.RdataType.make(rdtype)
-    q = dns.message.make_query(zone, rdtype, rdclass)
-    if rdtype == dns.rdatatype.IXFR:
-        rrset = q.find_rrset(
-            q.authority, zone, dns.rdataclass.IN, dns.rdatatype.SOA, create=True
-        )
-        soa = dns.rdata.from_text("IN", "SOA", ". . %u 0 0 0 0" % serial)
-        rrset.add(soa, 0)
-    if keyring is not None:
-        q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
-    (af, destination, source) = _destination_and_source(
-        where, port, source, source_port
-    )
-    (_, expiration) = _compute_times(lifetime)
-    tm = DummyTransactionManager(zone, relativize)
-    if use_udp and rdtype != dns.rdatatype.IXFR:
-        raise ValueError("cannot do a UDP AXFR")
-    sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM
-    with _make_socket(af, sock_type, source) as s:
-        _connect(s, destination, expiration)
-        yield from _inbound_xfr(tm, s, q, serial, timeout, expiration)
-
-
-def inbound_xfr(
-    where: str,
-    txn_manager: dns.transaction.TransactionManager,
-    query: Optional[dns.message.Message] = None,
-    port: int = 53,
-    timeout: Optional[float] = None,
-    lifetime: Optional[float] = None,
-    source: Optional[str] = None,
-    source_port: int = 0,
-    udp_mode: UDPMode = UDPMode.NEVER,
-) -> None:
-    """Conduct an inbound transfer and apply it via a transaction from the
-    txn_manager.
-
-    *where*, a ``str`` containing an IPv4 or IPv6 address, where
-    to send the message.
-
-    *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager
-    for this transfer (typically a ``dns.zone.Zone``).
-
-    *query*, the query to send. If not supplied, a default query is
-    constructed using information from the *txn_manager*.
-
-    *port*, an ``int``, the port to send the message to. The default is 53.
-
-    *timeout*, a ``float``, the number of seconds to wait for each
-    response message. If None, the default, wait forever.
-
-    *lifetime*, a ``float``, the total number of seconds to spend
-    doing the transfer. If ``None``, the default, then there is no
-    limit on the time the transfer may take.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
-    the source address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message.
-    The default is 0.
-
-    *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used
-    for IXFRs. The default is ``dns.query.UDPMode.NEVER``, i.e. only use
-    TCP. Other possibilities are ``dns.query.UDPMode.TRY_FIRST``, which
-    means "try UDP but fall back to TCP if needed", and
-    ``dns.query.UDPMode.ONLY``, which means "try UDP and raise
-    ``dns.xfr.UseTCP`` if it does not succeed."
-
-    Raises on errors.
- """ - if query is None: - (query, serial) = dns.xfr.make_query(txn_manager) - else: - serial = dns.xfr.extract_serial_from_query(query) - - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - (_, expiration) = _compute_times(lifetime) - if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: - with _make_socket(af, socket.SOCK_DGRAM, source) as s: - _connect(s, destination, expiration) - try: - for _ in _inbound_xfr( - txn_manager, s, query, serial, timeout, expiration - ): - pass - return - except dns.xfr.UseTCP: - if udp_mode == UDPMode.ONLY: - raise - - with _make_socket(af, socket.SOCK_STREAM, source) as s: - _connect(s, destination, expiration) - for _ in _inbound_xfr(txn_manager, s, query, serial, timeout, expiration): - pass diff --git a/venv/lib/python3.12/site-packages/dns/quic/__init__.py b/venv/lib/python3.12/site-packages/dns/quic/__init__.py deleted file mode 100644 index 0750e729..00000000 --- a/venv/lib/python3.12/site-packages/dns/quic/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -from typing import List, Tuple - -import dns._features -import dns.asyncbackend - -if dns._features.have("doq"): - import aioquic.quic.configuration # type: ignore - - from dns._asyncbackend import NullContext - from dns.quic._asyncio import ( - AsyncioQuicConnection, - AsyncioQuicManager, - AsyncioQuicStream, - ) - from dns.quic._common import AsyncQuicConnection, AsyncQuicManager - from dns.quic._sync import SyncQuicConnection, SyncQuicManager, SyncQuicStream - - have_quic = True - - def null_factory( - *args, # pylint: disable=unused-argument - **kwargs, # pylint: disable=unused-argument - ): - return NullContext(None) - - def _asyncio_manager_factory( - context, *args, **kwargs # pylint: disable=unused-argument - ): - return AsyncioQuicManager(*args, **kwargs) - - # We have a context factory and a manager factory as for trio we need to have - # a nursery. 
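# [Editorial sketch, not part of the deleted file.] How the
# (context_factory, manager_factory) pairs registered below are consumed;
# the helper name fetch_doq is illustrative, the rest is this module's API.

import dns.quic


async def fetch_doq(where: str, wire: bytes, timeout: float = 5.0) -> bytes:
    # Pick the factory pair for the current async backend (asyncio or trio).
    context_factory, manager_factory = dns.quic.factories_for_backend()
    async with context_factory() as context:
        async with manager_factory(context, server_name=where) as manager:
            connection = manager.connect(where, 853)
            stream = await connection.make_stream(timeout)
            async with stream:
                # wire must be a DNS query in wire form with id 0 (a DoQ rule).
                await stream.send(wire, True)  # True ends our side of the stream
                return await stream.receive(timeout)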
- - _async_factories = {"asyncio": (null_factory, _asyncio_manager_factory)} - - if dns._features.have("trio"): - import trio - - from dns.quic._trio import ( # pylint: disable=ungrouped-imports - TrioQuicConnection, - TrioQuicManager, - TrioQuicStream, - ) - - def _trio_context_factory(): - return trio.open_nursery() - - def _trio_manager_factory(context, *args, **kwargs): - return TrioQuicManager(context, *args, **kwargs) - - _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory) - - def factories_for_backend(backend=None): - if backend is None: - backend = dns.asyncbackend.get_default_backend() - return _async_factories[backend.name()] - -else: # pragma: no cover - have_quic = False - - from typing import Any - - class AsyncQuicStream: # type: ignore - pass - - class AsyncQuicConnection: # type: ignore - async def make_stream(self) -> Any: - raise NotImplementedError - - class SyncQuicStream: # type: ignore - pass - - class SyncQuicConnection: # type: ignore - def make_stream(self) -> Any: - raise NotImplementedError - - -Headers = List[Tuple[bytes, bytes]] diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index cb47d85d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc deleted file mode 100644 index b5eeeddc..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_asyncio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc deleted file mode 100644 index 69301f1c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_common.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc deleted file mode 100644 index 94a9c9d1..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_sync.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc deleted file mode 100644 index 77155a4a..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/quic/__pycache__/_trio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py b/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py deleted file mode 100644 index f87515da..00000000 --- a/venv/lib/python3.12/site-packages/dns/quic/_asyncio.py +++ /dev/null @@ -1,267 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import asyncio -import socket -import ssl -import struct -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore - -import dns.asyncbackend -import dns.exception -import dns.inet -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - AsyncQuicConnection, - AsyncQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - - -class 
AsyncioQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = asyncio.Condition() - - async def _wait_for_wake_up(self): - async with self._wake_up: - await self._wake_up.wait() - - async def wait_for(self, amount, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - if self._buffer.have(amount): - return - self._expecting = amount - try: - await asyncio.wait_for(self._wait_for_wake_up(), timeout) - except TimeoutError: - raise dns.exception.Timeout - self._expecting = 0 - - async def wait_for_end(self, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - if self._buffer.seen_end(): - return - try: - await asyncio.wait_for(self._wait_for_wake_up(), timeout) - except TimeoutError: - raise dns.exception.Timeout - - async def receive(self, timeout=None): - expiration = self._expiration_from_timeout(timeout) - if self._connection.is_h3(): - await self.wait_for_end(expiration) - return self._buffer.get_all() - else: - await self.wait_for(2, expiration) - (size,) = struct.unpack("!H", self._buffer.get(2)) - await self.wait_for(size, expiration) - return self._buffer.get(size) - - async def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - await self._connection.write(self._stream_id, data, is_end) - - async def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - async with self._wake_up: - self._wake_up.notify() - - async def close(self): - self._close() - - # Streams are async context managers - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close() - async with self._wake_up: - self._wake_up.notify() - return False - - -class AsyncioQuicConnection(AsyncQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager=None): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = None - self._handshake_complete = asyncio.Event() - self._socket_created = asyncio.Event() - self._wake_timer = asyncio.Condition() - self._receiver_task = None - self._sender_task = None - self._wake_pending = False - - async def _receiver(self): - try: - af = dns.inet.af_for_address(self._address) - backend = dns.asyncbackend.get_backend("asyncio") - # Note that peer is a low-level address tuple, but make_socket() wants - # a high-level address tuple, so we convert. - self._socket = await backend.make_socket( - af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1]) - ) - self._socket_created.set() - async with self._socket: - while not self._done: - (datagram, address) = await self._socket.recvfrom( - QUIC_MAX_DATAGRAM, None - ) - if address[0] != self._peer[0] or address[1] != self._peer[1]: - continue - self._connection.receive_datagram(datagram, address, time.time()) - # Wake up the timer in case the sender is sleeping, as there may be - # stuff to send now. 
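# [Editorial sketch, not part of the deleted file.] The _wakeup() /
# _wait_for_wake_timer() pair used here is a flag plus condition variable:
# the flag makes a notification that arrives before the waiter blocks still
# take effect, avoiding a lost wakeup. A standalone model of the pattern:

import asyncio


class Waker:
    def __init__(self) -> None:
        self._cond = asyncio.Condition()
        self._pending = False

    async def wake(self) -> None:
        async with self._cond:
            self._pending = True  # remembered even if nobody is waiting yet
            self._cond.notify_all()

    async def wait(self) -> None:
        async with self._cond:
            if not self._pending:  # skip blocking if a wake already arrived
                await self._cond.wait()
            self._pending = False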
- await self._wakeup() - except Exception: - pass - finally: - self._done = True - await self._wakeup() - self._handshake_complete.set() - - async def _wakeup(self): - self._wake_pending = True - async with self._wake_timer: - self._wake_timer.notify_all() - - async def _wait_for_wake_timer(self): - async with self._wake_timer: - if not self._wake_pending: - await self._wake_timer.wait() - self._wake_pending = False - - async def _sender(self): - await self._socket_created.wait() - while not self._done: - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, address in datagrams: - assert address == self._peer - await self._socket.sendto(datagram, self._peer, None) - (expiration, interval) = self._get_timer_values() - try: - await asyncio.wait_for(self._wait_for_wake_timer(), interval) - except Exception: - pass - self._handle_timer(expiration) - await self._handle_events() - - async def _handle_events(self): - count = 0 - while True: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - if self.is_h3(): - h3_events = self._h3_conn.handle_event(event) - for h3_event in h3_events: - if isinstance(h3_event, aioquic.h3.events.HeadersReceived): - stream = self._streams.get(event.stream_id) - if stream: - if stream._headers is None: - stream._headers = h3_event.headers - elif stream._trailers is None: - stream._trailers = h3_event.headers - if h3_event.stream_ended: - await stream._add_input(b"", True) - elif isinstance(h3_event, aioquic.h3.events.DataReceived): - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input( - h3_event.data, h3_event.stream_ended - ) - else: - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance(event, aioquic.quic.events.ConnectionTerminated): - self._done = True - self._receiver_task.cancel() - elif isinstance(event, aioquic.quic.events.StreamReset): - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input(b"", True) - - count += 1 - if count > 10: - # yield - count = 0 - await asyncio.sleep(0) - - async def write(self, stream, data, is_end=False): - self._connection.send_stream_data(stream, data, is_end) - await self._wakeup() - - def run(self): - if self._closed: - return - self._receiver_task = asyncio.Task(self._receiver()) - self._sender_task = asyncio.Task(self._sender()) - - async def make_stream(self, timeout=None): - try: - await asyncio.wait_for(self._handshake_complete.wait(), timeout) - except TimeoutError: - raise dns.exception.Timeout - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = AsyncioQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - - async def close(self): - if not self._closed: - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - # sender might be blocked on this, so set it - self._socket_created.set() - await self._wakeup() - try: - await self._receiver_task - except asyncio.CancelledError: - pass - try: - await self._sender_task - except asyncio.CancelledError: - pass - await self._socket.close() - - -class AsyncioQuicManager(AsyncQuicManager): - def __init__( - self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False - ): - 
super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name, h3) - - def connect( - self, address, port=853, source=None, source_port=0, want_session_ticket=True - ): - (connection, start) = self._connect( - address, port, source, source_port, want_session_ticket - ) - if start: - connection.run() - return connection - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. - connections = list(self._connections.values()) - for connection in connections: - await connection.close() - return False diff --git a/venv/lib/python3.12/site-packages/dns/quic/_common.py b/venv/lib/python3.12/site-packages/dns/quic/_common.py deleted file mode 100644 index ce575b03..00000000 --- a/venv/lib/python3.12/site-packages/dns/quic/_common.py +++ /dev/null @@ -1,339 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import base64 -import copy -import functools -import socket -import struct -import time -import urllib -from typing import Any, Optional - -import aioquic.h3.connection # type: ignore -import aioquic.h3.events # type: ignore -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore - -import dns.inet - -QUIC_MAX_DATAGRAM = 2048 -MAX_SESSION_TICKETS = 8 -# If we hit the max sessions limit we will delete this many of the oldest connections. -# The value must be a integer > 0 and <= MAX_SESSION_TICKETS. -SESSIONS_TO_DELETE = MAX_SESSION_TICKETS // 4 - - -class UnexpectedEOF(Exception): - pass - - -class Buffer: - def __init__(self): - self._buffer = b"" - self._seen_end = False - - def put(self, data, is_end): - if self._seen_end: - return - self._buffer += data - if is_end: - self._seen_end = True - - def have(self, amount): - if len(self._buffer) >= amount: - return True - if self._seen_end: - raise UnexpectedEOF - return False - - def seen_end(self): - return self._seen_end - - def get(self, amount): - assert self.have(amount) - data = self._buffer[:amount] - self._buffer = self._buffer[amount:] - return data - - def get_all(self): - assert self.seen_end() - data = self._buffer - self._buffer = b"" - return data - - -class BaseQuicStream: - def __init__(self, connection, stream_id): - self._connection = connection - self._stream_id = stream_id - self._buffer = Buffer() - self._expecting = 0 - self._headers = None - self._trailers = None - - def id(self): - return self._stream_id - - def headers(self): - return self._headers - - def trailers(self): - return self._trailers - - def _expiration_from_timeout(self, timeout): - if timeout is not None: - expiration = time.time() + timeout - else: - expiration = None - return expiration - - def _timeout_from_expiration(self, expiration): - if expiration is not None: - timeout = max(expiration - time.time(), 0.0) - else: - timeout = None - return timeout - - # Subclass must implement receive() as sync / async and which returns a message - # or raises. - - # Subclass must implement send() as sync / async and which takes a message and - # an EOF indicator. 
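# [Editorial sketch, not part of the deleted file.] Plain DoQ (non-H3) streams
# carry each DNS message behind a 2-byte big-endian length prefix; this is
# what _encapsulate() below adds and the receive() implementations strip.
# doq_frame/doq_unframe are illustrative names.

import struct


def doq_frame(message: bytes) -> bytes:
    # 2-byte length in network byte order, then the message itself.
    return struct.pack("!H", len(message)) + message


def doq_unframe(wire: bytes) -> bytes:
    (size,) = struct.unpack("!H", wire[:2])
    return wire[2 : 2 + size]


assert doq_unframe(doq_frame(b"\x12\x34")) == b"\x12\x34"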
- - def send_h3(self, url, datagram, post=True): - if not self._connection.is_h3(): - raise SyntaxError("cannot send H3 to a non-H3 connection") - url_parts = urllib.parse.urlparse(url) - path = url_parts.path.encode() - if post: - method = b"POST" - else: - method = b"GET" - path += b"?dns=" + base64.urlsafe_b64encode(datagram).rstrip(b"=") - headers = [ - (b":method", method), - (b":scheme", url_parts.scheme.encode()), - (b":authority", url_parts.netloc.encode()), - (b":path", path), - (b"accept", b"application/dns-message"), - ] - if post: - headers.extend( - [ - (b"content-type", b"application/dns-message"), - (b"content-length", str(len(datagram)).encode()), - ] - ) - self._connection.send_headers(self._stream_id, headers, not post) - if post: - self._connection.send_data(self._stream_id, datagram, True) - - def _encapsulate(self, datagram): - if self._connection.is_h3(): - return datagram - l = len(datagram) - return struct.pack("!H", l) + datagram - - def _common_add_input(self, data, is_end): - self._buffer.put(data, is_end) - try: - return ( - self._expecting > 0 and self._buffer.have(self._expecting) - ) or self._buffer.seen_end - except UnexpectedEOF: - return True - - def _close(self): - self._connection.close_stream(self._stream_id) - self._buffer.put(b"", True) # send EOF in case we haven't seen it. - - -class BaseQuicConnection: - def __init__( - self, - connection, - address, - port, - source=None, - source_port=0, - manager=None, - ): - self._done = False - self._connection = connection - self._address = address - self._port = port - self._closed = False - self._manager = manager - self._streams = {} - if manager.is_h3(): - self._h3_conn = aioquic.h3.connection.H3Connection(connection, False) - else: - self._h3_conn = None - self._af = dns.inet.af_for_address(address) - self._peer = dns.inet.low_level_address_tuple((address, port)) - if source is None and source_port != 0: - if self._af == socket.AF_INET: - source = "0.0.0.0" - elif self._af == socket.AF_INET6: - source = "::" - else: - raise NotImplementedError - if source: - self._source = (source, source_port) - else: - self._source = None - - def is_h3(self): - return self._h3_conn is not None - - def close_stream(self, stream_id): - del self._streams[stream_id] - - def send_headers(self, stream_id, headers, is_end=False): - self._h3_conn.send_headers(stream_id, headers, is_end) - - def send_data(self, stream_id, data, is_end=False): - self._h3_conn.send_data(stream_id, data, is_end) - - def _get_timer_values(self, closed_is_special=True): - now = time.time() - expiration = self._connection.get_timer() - if expiration is None: - expiration = now + 3600 # arbitrary "big" value - interval = max(expiration - now, 0) - if self._closed and closed_is_special: - # lower sleep interval to avoid a race in the closing process - # which can lead to higher latency closing due to sleeping when - # we have events. 
- interval = min(interval, 0.05) - return (expiration, interval) - - def _handle_timer(self, expiration): - now = time.time() - if expiration <= now: - self._connection.handle_timer(now) - - -class AsyncQuicConnection(BaseQuicConnection): - async def make_stream(self, timeout: Optional[float] = None) -> Any: - pass - - -class BaseQuicManager: - def __init__( - self, conf, verify_mode, connection_factory, server_name=None, h3=False - ): - self._connections = {} - self._connection_factory = connection_factory - self._session_tickets = {} - self._tokens = {} - self._h3 = h3 - if conf is None: - verify_path = None - if isinstance(verify_mode, str): - verify_path = verify_mode - verify_mode = True - if h3: - alpn_protocols = ["h3"] - else: - alpn_protocols = ["doq", "doq-i03"] - conf = aioquic.quic.configuration.QuicConfiguration( - alpn_protocols=alpn_protocols, - verify_mode=verify_mode, - server_name=server_name, - ) - if verify_path is not None: - conf.load_verify_locations(verify_path) - self._conf = conf - - def _connect( - self, - address, - port=853, - source=None, - source_port=0, - want_session_ticket=True, - want_token=True, - ): - connection = self._connections.get((address, port)) - if connection is not None: - return (connection, False) - conf = self._conf - if want_session_ticket: - try: - session_ticket = self._session_tickets.pop((address, port)) - # We found a session ticket, so make a configuration that uses it. - conf = copy.copy(conf) - conf.session_ticket = session_ticket - except KeyError: - # No session ticket. - pass - # Whether or not we found a session ticket, we want a handler to save - # one. - session_ticket_handler = functools.partial( - self.save_session_ticket, address, port - ) - else: - session_ticket_handler = None - if want_token: - try: - token = self._tokens.pop((address, port)) - # We found a token, so make a configuration that uses it. - conf = copy.copy(conf) - conf.token = token - except KeyError: - # No token - pass - # Whether or not we found a token, we want a handler to save # one. - token_handler = functools.partial(self.save_token, address, port) - else: - token_handler = None - - qconn = aioquic.quic.connection.QuicConnection( - configuration=conf, - session_ticket_handler=session_ticket_handler, - token_handler=token_handler, - ) - lladdress = dns.inet.low_level_address_tuple((address, port)) - qconn.connect(lladdress, time.time()) - connection = self._connection_factory( - qconn, address, port, source, source_port, self - ) - self._connections[(address, port)] = connection - return (connection, True) - - def closed(self, address, port): - try: - del self._connections[(address, port)] - except KeyError: - pass - - def is_h3(self): - return self._h3 - - def save_session_ticket(self, address, port, ticket): - # We rely on dictionaries keys() being in insertion order here. We - # can't just popitem() as that would be LIFO which is the opposite of - # what we want. - l = len(self._session_tickets) - if l >= MAX_SESSION_TICKETS: - keys_to_delete = list(self._session_tickets.keys())[0:SESSIONS_TO_DELETE] - for key in keys_to_delete: - del self._session_tickets[key] - self._session_tickets[(address, port)] = ticket - - def save_token(self, address, port, token): - # We rely on dictionaries keys() being in insertion order here. We - # can't just popitem() as that would be LIFO which is the opposite of - # what we want. 
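# [Editorial sketch, not part of the deleted file.] The eviction performed
# below keeps the token and session-ticket caches bounded by dropping the
# oldest entries first, relying on dicts preserving insertion order
# (guaranteed since Python 3.7):

def evict_oldest(cache: dict, limit: int, batch: int) -> None:
    # Drop the first `batch` keys, i.e. the oldest inserts, once at `limit`.
    if len(cache) >= limit:
        for key in list(cache.keys())[:batch]:
            del cache[key]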
- l = len(self._tokens) - if l >= MAX_SESSION_TICKETS: - keys_to_delete = list(self._tokens.keys())[0:SESSIONS_TO_DELETE] - for key in keys_to_delete: - del self._tokens[key] - self._tokens[(address, port)] = token - - -class AsyncQuicManager(BaseQuicManager): - def connect(self, address, port=853, source=None, source_port=0): - raise NotImplementedError diff --git a/venv/lib/python3.12/site-packages/dns/quic/_sync.py b/venv/lib/python3.12/site-packages/dns/quic/_sync.py deleted file mode 100644 index 473d1f48..00000000 --- a/venv/lib/python3.12/site-packages/dns/quic/_sync.py +++ /dev/null @@ -1,295 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import selectors -import socket -import ssl -import struct -import threading -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore - -import dns.exception -import dns.inet -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - BaseQuicConnection, - BaseQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - -# Function used to create a socket. Can be overridden if needed in special -# situations. -socket_factory = socket.socket - - -class SyncQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = threading.Condition() - self._lock = threading.Lock() - - def wait_for(self, amount, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - with self._lock: - if self._buffer.have(amount): - return - self._expecting = amount - with self._wake_up: - if not self._wake_up.wait(timeout): - raise dns.exception.Timeout - self._expecting = 0 - - def wait_for_end(self, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - with self._lock: - if self._buffer.seen_end(): - return - with self._wake_up: - if not self._wake_up.wait(timeout): - raise dns.exception.Timeout - - def receive(self, timeout=None): - expiration = self._expiration_from_timeout(timeout) - if self._connection.is_h3(): - self.wait_for_end(expiration) - with self._lock: - return self._buffer.get_all() - else: - self.wait_for(2, expiration) - with self._lock: - (size,) = struct.unpack("!H", self._buffer.get(2)) - self.wait_for(size, expiration) - with self._lock: - return self._buffer.get(size) - - def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - self._connection.write(self._stream_id, data, is_end) - - def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - with self._wake_up: - self._wake_up.notify() - - def close(self): - with self._lock: - self._close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - with self._wake_up: - self._wake_up.notify() - return False - - -class SyncQuicConnection(BaseQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = socket_factory(self._af, socket.SOCK_DGRAM, 0) - if self._source is not None: - try: - self._socket.bind( - dns.inet.low_level_address_tuple(self._source, self._af) - ) - except Exception: - self._socket.close() - raise - self._socket.connect(self._peer) - (self._send_wakeup, self._receive_wakeup) = socket.socketpair() - self._receive_wakeup.setblocking(False) - self._socket.setblocking(False) - self._handshake_complete = threading.Event() - 
self._worker_thread = None - self._lock = threading.Lock() - - def _read(self): - count = 0 - while count < 10: - count += 1 - try: - datagram = self._socket.recv(QUIC_MAX_DATAGRAM) - except BlockingIOError: - return - with self._lock: - self._connection.receive_datagram(datagram, self._peer, time.time()) - - def _drain_wakeup(self): - while True: - try: - self._receive_wakeup.recv(32) - except BlockingIOError: - return - - def _worker(self): - try: - sel = selectors.DefaultSelector() - sel.register(self._socket, selectors.EVENT_READ, self._read) - sel.register(self._receive_wakeup, selectors.EVENT_READ, self._drain_wakeup) - while not self._done: - (expiration, interval) = self._get_timer_values(False) - items = sel.select(interval) - for key, _ in items: - key.data() - with self._lock: - self._handle_timer(expiration) - self._handle_events() - with self._lock: - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, _ in datagrams: - try: - self._socket.send(datagram) - except BlockingIOError: - # we let QUIC handle any lossage - pass - finally: - with self._lock: - self._done = True - self._socket.close() - # Ensure anyone waiting for this gets woken up. - self._handshake_complete.set() - - def _handle_events(self): - while True: - with self._lock: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - if self.is_h3(): - h3_events = self._h3_conn.handle_event(event) - for h3_event in h3_events: - if isinstance(h3_event, aioquic.h3.events.HeadersReceived): - with self._lock: - stream = self._streams.get(event.stream_id) - if stream: - if stream._headers is None: - stream._headers = h3_event.headers - elif stream._trailers is None: - stream._trailers = h3_event.headers - if h3_event.stream_ended: - stream._add_input(b"", True) - elif isinstance(h3_event, aioquic.h3.events.DataReceived): - with self._lock: - stream = self._streams.get(event.stream_id) - if stream: - stream._add_input(h3_event.data, h3_event.stream_ended) - else: - with self._lock: - stream = self._streams.get(event.stream_id) - if stream: - stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance(event, aioquic.quic.events.ConnectionTerminated): - with self._lock: - self._done = True - elif isinstance(event, aioquic.quic.events.StreamReset): - with self._lock: - stream = self._streams.get(event.stream_id) - if stream: - stream._add_input(b"", True) - - def write(self, stream, data, is_end=False): - with self._lock: - self._connection.send_stream_data(stream, data, is_end) - self._send_wakeup.send(b"\x01") - - def send_headers(self, stream_id, headers, is_end=False): - with self._lock: - super().send_headers(stream_id, headers, is_end) - if is_end: - self._send_wakeup.send(b"\x01") - - def send_data(self, stream_id, data, is_end=False): - with self._lock: - super().send_data(stream_id, data, is_end) - if is_end: - self._send_wakeup.send(b"\x01") - - def run(self): - if self._closed: - return - self._worker_thread = threading.Thread(target=self._worker) - self._worker_thread.start() - - def make_stream(self, timeout=None): - if not self._handshake_complete.wait(timeout): - raise dns.exception.Timeout - with self._lock: - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = SyncQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - - 
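# [Editorial sketch, not part of the deleted file.] Typical use of the sync
# classes in this file, mirroring the DoQ path in dns.query; `where` must be
# a server reachable on UDP/853 and `wire` a DNS query in wire form with
# message id 0. SyncQuicManager is defined later in this file.

import dns.quic


def doq_roundtrip(where: str, wire: bytes, timeout: float = 5.0) -> bytes:
    with dns.quic.SyncQuicManager(server_name=where) as manager:
        connection = manager.connect(where, 853)
        with connection.make_stream(timeout) as stream:
            stream.send(wire, True)  # True ends our side of the stream
            return stream.receive(timeout)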
def close_stream(self, stream_id): - with self._lock: - super().close_stream(stream_id) - - def close(self): - with self._lock: - if self._closed: - return - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - self._send_wakeup.send(b"\x01") - self._worker_thread.join() - - -class SyncQuicManager(BaseQuicManager): - def __init__( - self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False - ): - super().__init__(conf, verify_mode, SyncQuicConnection, server_name, h3) - self._lock = threading.Lock() - - def connect( - self, - address, - port=853, - source=None, - source_port=0, - want_session_ticket=True, - want_token=True, - ): - with self._lock: - (connection, start) = self._connect( - address, port, source, source_port, want_session_ticket, want_token - ) - if start: - connection.run() - return connection - - def closed(self, address, port): - with self._lock: - super().closed(address, port) - - def save_session_ticket(self, address, port, ticket): - with self._lock: - super().save_session_ticket(address, port, ticket) - - def save_token(self, address, port, token): - with self._lock: - super().save_token(address, port, token) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. - connections = list(self._connections.values()) - for connection in connections: - connection.close() - return False diff --git a/venv/lib/python3.12/site-packages/dns/quic/_trio.py b/venv/lib/python3.12/site-packages/dns/quic/_trio.py deleted file mode 100644 index ae79f369..00000000 --- a/venv/lib/python3.12/site-packages/dns/quic/_trio.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import socket -import ssl -import struct -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore -import trio - -import dns.exception -import dns.inet -from dns._asyncbackend import NullContext -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - AsyncQuicConnection, - AsyncQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - - -class TrioQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = trio.Condition() - - async def wait_for(self, amount): - while True: - if self._buffer.have(amount): - return - self._expecting = amount - async with self._wake_up: - await self._wake_up.wait() - self._expecting = 0 - - async def wait_for_end(self): - while True: - if self._buffer.seen_end(): - return - async with self._wake_up: - await self._wake_up.wait() - - async def receive(self, timeout=None): - if timeout is None: - context = NullContext(None) - else: - context = trio.move_on_after(timeout) - with context: - if self._connection.is_h3(): - await self.wait_for_end() - return self._buffer.get_all() - else: - await self.wait_for(2) - (size,) = struct.unpack("!H", self._buffer.get(2)) - await self.wait_for(size) - return self._buffer.get(size) - raise dns.exception.Timeout - - async def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - await self._connection.write(self._stream_id, data, is_end) - - async def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - async with self._wake_up: - self._wake_up.notify() - - async def close(self): - self._close() - - # 
Streams are async context managers - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close() - async with self._wake_up: - self._wake_up.notify() - return False - - -class TrioQuicConnection(AsyncQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager=None): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0) - self._handshake_complete = trio.Event() - self._run_done = trio.Event() - self._worker_scope = None - self._send_pending = False - - async def _worker(self): - try: - if self._source: - await self._socket.bind( - dns.inet.low_level_address_tuple(self._source, self._af) - ) - await self._socket.connect(self._peer) - while not self._done: - (expiration, interval) = self._get_timer_values(False) - if self._send_pending: - # Do not block forever if sends are pending. Even though we - # have a wake-up mechanism if we've already started the blocking - # read, the possibility of context switching in send means that - # more writes can happen while we have no wake up context, so - # we need self._send_pending to avoid (effectively) a "lost wakeup" - # race. - interval = 0.0 - with trio.CancelScope( - deadline=trio.current_time() + interval - ) as self._worker_scope: - datagram = await self._socket.recv(QUIC_MAX_DATAGRAM) - self._connection.receive_datagram(datagram, self._peer, time.time()) - self._worker_scope = None - self._handle_timer(expiration) - await self._handle_events() - # We clear this now, before sending anything, as sending can cause - # context switches that do more sends. We want to know if that - # happens so we don't block a long time on the recv() above. 
- self._send_pending = False - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, _ in datagrams: - await self._socket.send(datagram) - finally: - self._done = True - self._socket.close() - self._handshake_complete.set() - - async def _handle_events(self): - count = 0 - while True: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - if self.is_h3(): - h3_events = self._h3_conn.handle_event(event) - for h3_event in h3_events: - if isinstance(h3_event, aioquic.h3.events.HeadersReceived): - stream = self._streams.get(event.stream_id) - if stream: - if stream._headers is None: - stream._headers = h3_event.headers - elif stream._trailers is None: - stream._trailers = h3_event.headers - if h3_event.stream_ended: - await stream._add_input(b"", True) - elif isinstance(h3_event, aioquic.h3.events.DataReceived): - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input( - h3_event.data, h3_event.stream_ended - ) - else: - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance(event, aioquic.quic.events.ConnectionTerminated): - self._done = True - self._socket.close() - elif isinstance(event, aioquic.quic.events.StreamReset): - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input(b"", True) - count += 1 - if count > 10: - # yield - count = 0 - await trio.sleep(0) - - async def write(self, stream, data, is_end=False): - self._connection.send_stream_data(stream, data, is_end) - self._send_pending = True - if self._worker_scope is not None: - self._worker_scope.cancel() - - async def run(self): - if self._closed: - return - async with trio.open_nursery() as nursery: - nursery.start_soon(self._worker) - self._run_done.set() - - async def make_stream(self, timeout=None): - if timeout is None: - context = NullContext(None) - else: - context = trio.move_on_after(timeout) - with context: - await self._handshake_complete.wait() - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = TrioQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - raise dns.exception.Timeout - - async def close(self): - if not self._closed: - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - self._send_pending = True - if self._worker_scope is not None: - self._worker_scope.cancel() - await self._run_done.wait() - - -class TrioQuicManager(AsyncQuicManager): - def __init__( - self, - nursery, - conf=None, - verify_mode=ssl.CERT_REQUIRED, - server_name=None, - h3=False, - ): - super().__init__(conf, verify_mode, TrioQuicConnection, server_name, h3) - self._nursery = nursery - - def connect( - self, address, port=853, source=None, source_port=0, want_session_ticket=True - ): - (connection, start) = self._connect( - address, port, source, source_port, want_session_ticket - ) - if start: - self._nursery.start_soon(connection.run) - return connection - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. 
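# [Editorial sketch, not part of the deleted file.] Why the copy below is
# needed: closing a connection calls manager.closed(), which deletes from
# self._connections, and mutating a dict while iterating over it raises:

d = {"a": 1, "b": 2}
try:
    for key in d:
        del d[key]  # mutates the dict being iterated
except RuntimeError as exc:
    print(exc)  # dictionary changed size during iteration

for key in list(d):  # iterate over a snapshot instead
    del d[key]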
- connections = list(self._connections.values()) - for connection in connections: - await connection.close() - return False diff --git a/venv/lib/python3.12/site-packages/dns/rcode.py b/venv/lib/python3.12/site-packages/dns/rcode.py deleted file mode 100644 index 8e6386f8..00000000 --- a/venv/lib/python3.12/site-packages/dns/rcode.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Result Codes.""" - -from typing import Tuple - -import dns.enum -import dns.exception - - -class Rcode(dns.enum.IntEnum): - #: No error - NOERROR = 0 - #: Format error - FORMERR = 1 - #: Server failure - SERVFAIL = 2 - #: Name does not exist ("Name Error" in RFC 1025 terminology). - NXDOMAIN = 3 - #: Not implemented - NOTIMP = 4 - #: Refused - REFUSED = 5 - #: Name exists. - YXDOMAIN = 6 - #: RRset exists. - YXRRSET = 7 - #: RRset does not exist. - NXRRSET = 8 - #: Not authoritative. - NOTAUTH = 9 - #: Name not in zone. - NOTZONE = 10 - #: DSO-TYPE Not Implemented - DSOTYPENI = 11 - #: Bad EDNS version. - BADVERS = 16 - #: TSIG Signature Failure - BADSIG = 16 - #: Key not recognized. - BADKEY = 17 - #: Signature out of time window. - BADTIME = 18 - #: Bad TKEY Mode. - BADMODE = 19 - #: Duplicate key name. - BADNAME = 20 - #: Algorithm not supported. - BADALG = 21 - #: Bad Truncation - BADTRUNC = 22 - #: Bad/missing Server Cookie - BADCOOKIE = 23 - - @classmethod - def _maximum(cls): - return 4095 - - @classmethod - def _unknown_exception_class(cls): - return UnknownRcode - - -class UnknownRcode(dns.exception.DNSException): - """A DNS rcode is unknown.""" - - -def from_text(text: str) -> Rcode: - """Convert text into an rcode. - - *text*, a ``str``, the textual rcode or an integer in textual form. - - Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown. - - Returns a ``dns.rcode.Rcode``. - """ - - return Rcode.from_text(text) - - -def from_flags(flags: int, ednsflags: int) -> Rcode: - """Return the rcode value encoded by flags and ednsflags. - - *flags*, an ``int``, the DNS flags field. - - *ednsflags*, an ``int``, the EDNS flags field. - - Raises ``ValueError`` if rcode is < 0 or > 4095 - - Returns a ``dns.rcode.Rcode``. - """ - - value = (flags & 0x000F) | ((ednsflags >> 20) & 0xFF0) - return Rcode.make(value) - - -def to_flags(value: Rcode) -> Tuple[int, int]: - """Return a (flags, ednsflags) tuple which encodes the rcode. - - *value*, a ``dns.rcode.Rcode``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns an ``(int, int)`` tuple. 
- """ - - if value < 0 or value > 4095: - raise ValueError("rcode must be >= 0 and <= 4095") - v = value & 0xF - ev = (value & 0xFF0) << 20 - return (v, ev) - - -def to_text(value: Rcode, tsig: bool = False) -> str: - """Convert rcode into text. - - *value*, a ``dns.rcode.Rcode``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns a ``str``. - """ - - if tsig and value == Rcode.BADVERS: - return "BADSIG" - return Rcode.to_text(value) - - -### BEGIN generated Rcode constants - -NOERROR = Rcode.NOERROR -FORMERR = Rcode.FORMERR -SERVFAIL = Rcode.SERVFAIL -NXDOMAIN = Rcode.NXDOMAIN -NOTIMP = Rcode.NOTIMP -REFUSED = Rcode.REFUSED -YXDOMAIN = Rcode.YXDOMAIN -YXRRSET = Rcode.YXRRSET -NXRRSET = Rcode.NXRRSET -NOTAUTH = Rcode.NOTAUTH -NOTZONE = Rcode.NOTZONE -DSOTYPENI = Rcode.DSOTYPENI -BADVERS = Rcode.BADVERS -BADSIG = Rcode.BADSIG -BADKEY = Rcode.BADKEY -BADTIME = Rcode.BADTIME -BADMODE = Rcode.BADMODE -BADNAME = Rcode.BADNAME -BADALG = Rcode.BADALG -BADTRUNC = Rcode.BADTRUNC -BADCOOKIE = Rcode.BADCOOKIE - -### END generated Rcode constants diff --git a/venv/lib/python3.12/site-packages/dns/rdata.py b/venv/lib/python3.12/site-packages/dns/rdata.py deleted file mode 100644 index 8099c26a..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdata.py +++ /dev/null @@ -1,911 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata.""" - -import base64 -import binascii -import inspect -import io -import itertools -import random -from importlib import import_module -from typing import Any, Dict, Optional, Tuple, Union - -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdataclass -import dns.rdatatype -import dns.tokenizer -import dns.ttl -import dns.wire - -_chunksize = 32 - -# We currently allow comparisons for rdata with relative names for backwards -# compatibility, but in the future we will not, as these kinds of comparisons -# can lead to subtle bugs if code is not carefully written. -# -# This switch allows the future behavior to be turned on so code can be -# tested with it. -_allow_relative_comparisons = True - - -class NoRelativeRdataOrdering(dns.exception.DNSException): - """An attempt was made to do an ordered comparison of one or more - rdata with relative names. The only reliable way of sorting rdata - is to use non-relativized rdata. - - """ - - -def _wordbreak(data, chunksize=_chunksize, separator=b" "): - """Break a binary string into chunks of chunksize characters separated by - a space. 
- """ - - if not chunksize: - return data.decode() - return separator.join( - [data[i : i + chunksize] for i in range(0, len(data), chunksize)] - ).decode() - - -# pylint: disable=unused-argument - - -def _hexify(data, chunksize=_chunksize, separator=b" ", **kw): - """Convert a binary string into its hex encoding, broken up into chunks - of chunksize characters separated by a separator. - """ - - return _wordbreak(binascii.hexlify(data), chunksize, separator) - - -def _base64ify(data, chunksize=_chunksize, separator=b" ", **kw): - """Convert a binary string into its base64 encoding, broken up into chunks - of chunksize characters separated by a separator. - """ - - return _wordbreak(base64.b64encode(data), chunksize, separator) - - -# pylint: enable=unused-argument - -__escaped = b'"\\' - - -def _escapify(qstring): - """Escape the characters in a quoted string which need it.""" - - if isinstance(qstring, str): - qstring = qstring.encode() - if not isinstance(qstring, bytearray): - qstring = bytearray(qstring) - - text = "" - for c in qstring: - if c in __escaped: - text += "\\" + chr(c) - elif c >= 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - -def _truncate_bitmap(what): - """Determine the index of greatest byte that isn't all zeros, and - return the bitmap that contains all the bytes less than that index. - """ - - for i in range(len(what) - 1, -1, -1): - if what[i] != 0: - return what[0 : i + 1] - return what[0:1] - - -# So we don't have to edit all the rdata classes... -_constify = dns.immutable.constify - - -@dns.immutable.immutable -class Rdata: - """Base class for all DNS rdata types.""" - - __slots__ = ["rdclass", "rdtype", "rdcomment"] - - def __init__(self, rdclass, rdtype): - """Initialize an rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - """ - - self.rdclass = self._as_rdataclass(rdclass) - self.rdtype = self._as_rdatatype(rdtype) - self.rdcomment = None - - def _get_all_slots(self): - return itertools.chain.from_iterable( - getattr(cls, "__slots__", []) for cls in self.__class__.__mro__ - ) - - def __getstate__(self): - # We used to try to do a tuple of all slots here, but it - # doesn't work as self._all_slots isn't available at - # __setstate__() time. Before that we tried to store a tuple - # of __slots__, but that didn't work as it didn't store the - # slots defined by ancestors. This older way didn't fail - # outright, but ended up with partially broken objects, e.g. - # if you unpickled an A RR it wouldn't have rdclass and rdtype - # attributes, and would compare badly. - state = {} - for slot in self._get_all_slots(): - state[slot] = getattr(self, slot) - return state - - def __setstate__(self, state): - for slot, val in state.items(): - object.__setattr__(self, slot, val) - if not hasattr(self, "rdcomment"): - # Pickled rdata from 2.0.x might not have a rdcomment, so add - # it if needed. - object.__setattr__(self, "rdcomment", None) - - def covers(self) -> dns.rdatatype.RdataType: - """Return the type a Rdata covers. - - DNS SIG/RRSIG rdatas apply to a specific type; this type is - returned by the covers() function. If the rdata type is not - SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when - creating rdatasets, allowing the rdataset to contain only RRSIGs - of a particular type, e.g. RRSIG(NS). - - Returns a ``dns.rdatatype.RdataType``. 
- """ - - return dns.rdatatype.NONE - - def extended_rdatatype(self) -> int: - """Return a 32-bit type value, the least significant 16 bits of - which are the ordinary DNS type, and the upper 16 bits of which are - the "covered" type, if any. - - Returns an ``int``. - """ - - return self.covers() << 16 | self.rdtype - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any], - ) -> str: - """Convert an rdata to text format. - - Returns a ``str``. - """ - - raise NotImplementedError # pragma: no cover - - def _to_wire( - self, - file: Optional[Any], - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - canonicalize: bool = False, - ) -> None: - raise NotImplementedError # pragma: no cover - - def to_wire( - self, - file: Optional[Any] = None, - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - canonicalize: bool = False, - ) -> Optional[bytes]: - """Convert an rdata to wire format. - - Returns a ``bytes`` if no output file was specified, or ``None`` otherwise. - """ - - if file: - # We call _to_wire() and then return None explicitly instead of - # of just returning the None from _to_wire() as mypy's func-returns-value - # unhelpfully errors out with "error: "_to_wire" of "Rdata" does not return - # a value (it only ever returns None)" - self._to_wire(file, compress, origin, canonicalize) - return None - else: - f = io.BytesIO() - self._to_wire(f, compress, origin, canonicalize) - return f.getvalue() - - def to_generic( - self, origin: Optional[dns.name.Name] = None - ) -> "dns.rdata.GenericRdata": - """Creates a dns.rdata.GenericRdata equivalent of this rdata. - - Returns a ``dns.rdata.GenericRdata``. - """ - return dns.rdata.GenericRdata( - self.rdclass, self.rdtype, self.to_wire(origin=origin) - ) - - def to_digestable(self, origin: Optional[dns.name.Name] = None) -> bytes: - """Convert rdata to a format suitable for digesting in hashes. This - is also the DNSSEC canonical form. - - Returns a ``bytes``. - """ - wire = self.to_wire(origin=origin, canonicalize=True) - assert wire is not None # for mypy - return wire - - def __repr__(self): - covers = self.covers() - if covers == dns.rdatatype.NONE: - ctext = "" - else: - ctext = "(" + dns.rdatatype.to_text(covers) + ")" - return ( - "" - ) - - def __str__(self): - return self.to_text() - - def _cmp(self, other): - """Compare an rdata with another rdata of the same rdtype and - rdclass. - - For rdata with only absolute names: - Return < 0 if self < other in the DNSSEC ordering, 0 if self - == other, and > 0 if self > other. - For rdata with at least one relative names: - The rdata sorts before any rdata with only absolute names. - When compared with another relative rdata, all names are - made absolute as if they were relative to the root, as the - proper origin is not available. While this creates a stable - ordering, it is NOT guaranteed to be the DNSSEC ordering. - In the future, all ordering comparisons for rdata with - relative names will be disallowed. 
- """ - try: - our = self.to_digestable() - our_relative = False - except dns.name.NeedAbsoluteNameOrOrigin: - if _allow_relative_comparisons: - our = self.to_digestable(dns.name.root) - our_relative = True - try: - their = other.to_digestable() - their_relative = False - except dns.name.NeedAbsoluteNameOrOrigin: - if _allow_relative_comparisons: - their = other.to_digestable(dns.name.root) - their_relative = True - if _allow_relative_comparisons: - if our_relative != their_relative: - # For the purpose of comparison, all rdata with at least one - # relative name is less than an rdata with only absolute names. - if our_relative: - return -1 - else: - return 1 - elif our_relative or their_relative: - raise NoRelativeRdataOrdering - if our == their: - return 0 - elif our > their: - return 1 - else: - return -1 - - def __eq__(self, other): - if not isinstance(other, Rdata): - return False - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return False - our_relative = False - their_relative = False - try: - our = self.to_digestable() - except dns.name.NeedAbsoluteNameOrOrigin: - our = self.to_digestable(dns.name.root) - our_relative = True - try: - their = other.to_digestable() - except dns.name.NeedAbsoluteNameOrOrigin: - their = other.to_digestable(dns.name.root) - their_relative = True - if our_relative != their_relative: - return False - return our == their - - def __ne__(self, other): - if not isinstance(other, Rdata): - return True - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return True - return not self.__eq__(other) - - def __lt__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) > 0 - - def __hash__(self): - return hash(self.to_digestable(dns.name.root)) - - @classmethod - def from_text( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - tok: dns.tokenizer.Tokenizer, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - ) -> "Rdata": - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - parser: dns.wire.Parser, - origin: Optional[dns.name.Name] = None, - ) -> "Rdata": - raise NotImplementedError # pragma: no cover - - def replace(self, **kwargs: Any) -> "Rdata": - """ - Create a new Rdata instance based on the instance replace was - invoked on. It is possible to pass different parameters to - override the corresponding properties of the base Rdata. - - Any field specific to the Rdata type can be replaced, but the - *rdtype* and *rdclass* fields cannot. - - Returns an instance of the same Rdata subclass as *self*. - """ - - # Get the constructor parameters. 
- parameters = inspect.signature(self.__init__).parameters # type: ignore - - # Ensure that all of the arguments correspond to valid fields. - # Don't allow rdclass or rdtype to be changed, though. - for key in kwargs: - if key == "rdcomment": - continue - if key not in parameters: - raise AttributeError( - f"'{self.__class__.__name__}' object has no attribute '{key}'" - ) - if key in ("rdclass", "rdtype"): - raise AttributeError( - f"Cannot overwrite '{self.__class__.__name__}' attribute '{key}'" - ) - - # Construct the parameter list. For each field, use the value in - # kwargs if present, and the current value otherwise. - args = (kwargs.get(key, getattr(self, key)) for key in parameters) - - # Create, validate, and return the new object. - rd = self.__class__(*args) - # The comment is not set in the constructor, so give it special - # handling. - rdcomment = kwargs.get("rdcomment", self.rdcomment) - if rdcomment is not None: - object.__setattr__(rd, "rdcomment", rdcomment) - return rd - - # Type checking and conversion helpers. These are class methods as - # they don't touch object state and may be useful to others. - - @classmethod - def _as_rdataclass(cls, value): - return dns.rdataclass.RdataClass.make(value) - - @classmethod - def _as_rdatatype(cls, value): - return dns.rdatatype.RdataType.make(value) - - @classmethod - def _as_bytes( - cls, - value: Any, - encode: bool = False, - max_length: Optional[int] = None, - empty_ok: bool = True, - ) -> bytes: - if encode and isinstance(value, str): - bvalue = value.encode() - elif isinstance(value, bytearray): - bvalue = bytes(value) - elif isinstance(value, bytes): - bvalue = value - else: - raise ValueError("not bytes") - if max_length is not None and len(bvalue) > max_length: - raise ValueError("too long") - if not empty_ok and len(bvalue) == 0: - raise ValueError("empty bytes not allowed") - return bvalue - - @classmethod - def _as_name(cls, value): - # Note that proper name conversion (e.g. with origin and IDNA - # awareness) is expected to be done via from_text. This is just - # a simple thing for people invoking the constructor directly. 
- if isinstance(value, str): - return dns.name.from_text(value) - elif not isinstance(value, dns.name.Name): - raise ValueError("not a name") - return value - - @classmethod - def _as_uint8(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 255: - raise ValueError("not a uint8") - return value - - @classmethod - def _as_uint16(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 65535: - raise ValueError("not a uint16") - return value - - @classmethod - def _as_uint32(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 4294967295: - raise ValueError("not a uint32") - return value - - @classmethod - def _as_uint48(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 281474976710655: - raise ValueError("not a uint48") - return value - - @classmethod - def _as_int(cls, value, low=None, high=None): - if not isinstance(value, int): - raise ValueError("not an integer") - if low is not None and value < low: - raise ValueError("value too small") - if high is not None and value > high: - raise ValueError("value too large") - return value - - @classmethod - def _as_ipv4_address(cls, value): - if isinstance(value, str): - return dns.ipv4.canonicalize(value) - elif isinstance(value, bytes): - return dns.ipv4.inet_ntoa(value) - else: - raise ValueError("not an IPv4 address") - - @classmethod - def _as_ipv6_address(cls, value): - if isinstance(value, str): - return dns.ipv6.canonicalize(value) - elif isinstance(value, bytes): - return dns.ipv6.inet_ntoa(value) - else: - raise ValueError("not an IPv6 address") - - @classmethod - def _as_bool(cls, value): - if isinstance(value, bool): - return value - else: - raise ValueError("not a boolean") - - @classmethod - def _as_ttl(cls, value): - if isinstance(value, int): - return cls._as_int(value, 0, dns.ttl.MAX_TTL) - elif isinstance(value, str): - return dns.ttl.from_text(value) - else: - raise ValueError("not a TTL") - - @classmethod - def _as_tuple(cls, value, as_value): - try: - # For user convenience, if value is a singleton of the list - # element type, wrap it in a tuple. - return (as_value(value),) - except Exception: - # Otherwise, check each element of the iterable *value* - # against *as_value*. - return tuple(as_value(v) for v in value) - - # Processing order - - @classmethod - def _processing_order(cls, iterable): - items = list(iterable) - random.shuffle(items) - return items - - -@dns.immutable.immutable -class GenericRdata(Rdata): - """Generic Rdata Class - - This class is used for rdata types for which we have no better - implementation. It implements the DNS "unknown RRs" scheme. 
- """ - - __slots__ = ["data"] - - def __init__(self, rdclass, rdtype, data): - super().__init__(rdclass, rdtype) - self.data = data - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any], - ) -> str: - return r"\# %d " % len(self.data) + _hexify(self.data, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - token = tok.get() - if not token.is_identifier() or token.value != r"\#": - raise dns.exception.SyntaxError(r"generic rdata does not start with \#") - length = tok.get_int() - hex = tok.concatenate_remaining_identifiers(True).encode() - data = binascii.unhexlify(hex) - if len(data) != length: - raise dns.exception.SyntaxError("generic rdata hex data has wrong length") - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.data) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - return cls(rdclass, rdtype, parser.get_remaining()) - - -_rdata_classes: Dict[Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any] = ( - {} -) -_module_prefix = "dns.rdtypes" -_dynamic_load_allowed = True - - -def get_rdata_class(rdclass, rdtype, use_generic=True): - cls = _rdata_classes.get((rdclass, rdtype)) - if not cls: - cls = _rdata_classes.get((dns.rdatatype.ANY, rdtype)) - if not cls and _dynamic_load_allowed: - rdclass_text = dns.rdataclass.to_text(rdclass) - rdtype_text = dns.rdatatype.to_text(rdtype) - rdtype_text = rdtype_text.replace("-", "_") - try: - mod = import_module( - ".".join([_module_prefix, rdclass_text, rdtype_text]) - ) - cls = getattr(mod, rdtype_text) - _rdata_classes[(rdclass, rdtype)] = cls - except ImportError: - try: - mod = import_module(".".join([_module_prefix, "ANY", rdtype_text])) - cls = getattr(mod, rdtype_text) - _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls - _rdata_classes[(rdclass, rdtype)] = cls - except ImportError: - pass - if not cls and use_generic: - cls = GenericRdata - _rdata_classes[(rdclass, rdtype)] = cls - return cls - - -def load_all_types(disable_dynamic_load=True): - """Load all rdata types for which dnspython has a non-generic implementation. - - Normally dnspython loads DNS rdatatype implementations on demand, but in some - specialized cases loading all types at an application-controlled time is preferred. - - If *disable_dynamic_load*, a ``bool``, is ``True`` then dnspython will not attempt - to use its dynamic loading mechanism if an unknown type is subsequently encountered, - and will simply use the ``GenericRdata`` class. - """ - # Load class IN and ANY types. - for rdtype in dns.rdatatype.RdataType: - get_rdata_class(dns.rdataclass.IN, rdtype, False) - # Load the one non-ANY implementation we have in CH. Everything - # else in CH is an ANY type, and we'll discover those on demand but won't - # have to import anything. - get_rdata_class(dns.rdataclass.CH, dns.rdatatype.A, False) - if disable_dynamic_load: - # Now disable dynamic loading so any subsequent unknown type immediately becomes - # GenericRdata without a load attempt. 
- global _dynamic_load_allowed - _dynamic_load_allowed = False - - -def from_text( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - tok: Union[dns.tokenizer.Tokenizer, str], - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, -) -> Rdata: - """Build an rdata object from text format. - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_text() class method is called - with the parameters to this function. - - If *tok* is a ``str``, then a tokenizer is created and the string - is used as its input. - - *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. - - *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use if a tokenizer needs to be created. If - ``None``, the default IDNA 2003 encoder/decoder is used. If a - tokenizer is not created, then the codec associated with the tokenizer - is the one that is used. - - Returns an instance of the chosen Rdata subclass. - - """ - if isinstance(tok, str): - tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - cls = get_rdata_class(rdclass, rdtype) - with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): - rdata = None - if cls != GenericRdata: - # peek at first token - token = tok.get() - tok.unget(token) - if token.is_identifier() and token.value == r"\#": - # - # Known type using the generic syntax. Extract the - # wire form from the generic syntax, and then run - # from_wire on it. - # - grdata = GenericRdata.from_text( - rdclass, rdtype, tok, origin, relativize, relativize_to - ) - rdata = from_wire( - rdclass, rdtype, grdata.data, 0, len(grdata.data), origin - ) - # - # If this comparison isn't equal, then there must have been - # compressed names in the wire format, which is an error, - # there being no reasonable context to decompress with. - # - rwire = rdata.to_wire() - if rwire != grdata.data: - raise dns.exception.SyntaxError( - "compressed data in " - "generic syntax form " - "of known rdatatype" - ) - if rdata is None: - rdata = cls.from_text( - rdclass, rdtype, tok, origin, relativize, relativize_to - ) - token = tok.get_eol_as_token() - if token.comment is not None: - object.__setattr__(rdata, "rdcomment", token.comment) - return rdata - - -def from_wire_parser( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - parser: dns.wire.Parser, - origin: Optional[dns.name.Name] = None, -) -> Rdata: - """Build an rdata object from wire format - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. 
- - Once a class is chosen, its from_wire() class method is called - with the parameters to this function. - - *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restricted to the rdata length. - - *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, - then names will be relativized to this origin. - - Returns an instance of the chosen Rdata subclass. - """ - - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - cls = get_rdata_class(rdclass, rdtype) - with dns.exception.ExceptionWrapper(dns.exception.FormError): - return cls.from_wire_parser(rdclass, rdtype, parser, origin) - - -def from_wire( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - wire: bytes, - current: int, - rdlen: int, - origin: Optional[dns.name.Name] = None, -) -> Rdata: - """Build an rdata object from wire format - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_wire() class method is called - with the parameters to this function. - - *rdclass*, an ``int``, the rdataclass. - - *rdtype*, an ``int``, the rdatatype. - - *wire*, a ``bytes``, the wire-format message. - - *current*, an ``int``, the offset in wire of the beginning of - the rdata. - - *rdlen*, an ``int``, the length of the wire-format rdata - - *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, - then names will be relativized to this origin. - - Returns an instance of the chosen Rdata subclass. - """ - parser = dns.wire.Parser(wire, current) - with parser.restrict_to(rdlen): - return from_wire_parser(rdclass, rdtype, parser, origin) - - -class RdatatypeExists(dns.exception.DNSException): - """DNS rdatatype already exists.""" - - supp_kwargs = {"rdclass", "rdtype"} - fmt = ( - "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " - + "already exists." - ) - - -def register_type( - implementation: Any, - rdtype: int, - rdtype_text: str, - is_singleton: bool = False, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, -) -> None: - """Dynamically register a module to handle an rdatatype. - - *implementation*, a module implementing the type in the usual dnspython - way. - - *rdtype*, an ``int``, the rdatatype to register. - - *rdtype_text*, a ``str``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - - *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if - it applies to all classes. 
- """ - - rdtype = dns.rdatatype.RdataType.make(rdtype) - existing_cls = get_rdata_class(rdclass, rdtype) - if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): - raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) - _rdata_classes[(rdclass, rdtype)] = getattr( - implementation, rdtype_text.replace("-", "_") - ) - dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/venv/lib/python3.12/site-packages/dns/rdataclass.py b/venv/lib/python3.12/site-packages/dns/rdataclass.py deleted file mode 100644 index 89b85a79..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdataclass.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Rdata Classes.""" - -import dns.enum -import dns.exception - - -class RdataClass(dns.enum.IntEnum): - """DNS Rdata Class""" - - RESERVED0 = 0 - IN = 1 - INTERNET = IN - CH = 3 - CHAOS = CH - HS = 4 - HESIOD = HS - NONE = 254 - ANY = 255 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "class" - - @classmethod - def _prefix(cls): - return "CLASS" - - @classmethod - def _unknown_exception_class(cls): - return UnknownRdataclass - - -_metaclasses = {RdataClass.NONE, RdataClass.ANY} - - -class UnknownRdataclass(dns.exception.DNSException): - """A DNS class is unknown.""" - - -def from_text(text: str) -> RdataClass: - """Convert text into a DNS rdata class value. - - The input text can be a defined DNS RR class mnemonic or - instance of the DNS generic class syntax. - - For example, "IN" and "CLASS1" will both result in a value of 1. - - Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns a ``dns.rdataclass.RdataClass``. - """ - - return RdataClass.from_text(text) - - -def to_text(value: RdataClass) -> str: - """Convert a DNS rdata class value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic class syntax will be used. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - return RdataClass.to_text(value) - - -def is_metaclass(rdclass: RdataClass) -> bool: - """True if the specified class is a metaclass. - - The currently defined metaclasses are ANY and NONE. - - *rdclass* is a ``dns.rdataclass.RdataClass``. 
- """ - - if rdclass in _metaclasses: - return True - return False - - -### BEGIN generated RdataClass constants - -RESERVED0 = RdataClass.RESERVED0 -IN = RdataClass.IN -INTERNET = RdataClass.INTERNET -CH = RdataClass.CH -CHAOS = RdataClass.CHAOS -HS = RdataClass.HS -HESIOD = RdataClass.HESIOD -NONE = RdataClass.NONE -ANY = RdataClass.ANY - -### END generated RdataClass constants diff --git a/venv/lib/python3.12/site-packages/dns/rdataset.py b/venv/lib/python3.12/site-packages/dns/rdataset.py deleted file mode 100644 index 39cab236..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdataset.py +++ /dev/null @@ -1,512 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" - -import io -import random -import struct -from typing import Any, Collection, Dict, List, Optional, Union, cast - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.renderer -import dns.set -import dns.ttl - -# define SimpleSet here for backwards compatibility -SimpleSet = dns.set.Set - - -class DifferingCovers(dns.exception.DNSException): - """An attempt was made to add a DNS SIG/RRSIG whose covered type - is not the same as that of the other rdatas in the rdataset.""" - - -class IncompatibleTypes(dns.exception.DNSException): - """An attempt was made to add DNS RR data of an incompatible type.""" - - -class Rdataset(dns.set.Set): - """A DNS rdataset.""" - - __slots__ = ["rdclass", "rdtype", "covers", "ttl"] - - def __init__( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ttl: int = 0, - ): - """Create a new rdataset of the specified class and type. - - *rdclass*, a ``dns.rdataclass.RdataClass``, the rdataclass. - - *rdtype*, an ``dns.rdatatype.RdataType``, the rdatatype. - - *covers*, an ``dns.rdatatype.RdataType``, the covered rdatatype. - - *ttl*, an ``int``, the TTL. - """ - - super().__init__() - self.rdclass = rdclass - self.rdtype: dns.rdatatype.RdataType = rdtype - self.covers: dns.rdatatype.RdataType = covers - self.ttl = ttl - - def _clone(self): - obj = super()._clone() - obj.rdclass = self.rdclass - obj.rdtype = self.rdtype - obj.covers = self.covers - obj.ttl = self.ttl - return obj - - def update_ttl(self, ttl: int) -> None: - """Perform TTL minimization. - - Set the TTL of the rdataset to be the lesser of the set's current - TTL or the specified TTL. If the set contains no rdatas, set the TTL - to the specified TTL. - - *ttl*, an ``int`` or ``str``. 
- """ - ttl = dns.ttl.make(ttl) - if len(self) == 0: - self.ttl = ttl - elif ttl < self.ttl: - self.ttl = ttl - - def add( # pylint: disable=arguments-differ,arguments-renamed - self, rd: dns.rdata.Rdata, ttl: Optional[int] = None - ) -> None: - """Add the specified rdata to the rdataset. - - If the optional *ttl* parameter is supplied, then - ``self.update_ttl(ttl)`` will be called prior to adding the rdata. - - *rd*, a ``dns.rdata.Rdata``, the rdata - - *ttl*, an ``int``, the TTL. - - Raises ``dns.rdataset.IncompatibleTypes`` if the type and class - do not match the type and class of the rdataset. - - Raises ``dns.rdataset.DifferingCovers`` if the type is a signature - type and the covered type does not match that of the rdataset. - """ - - # - # If we're adding a signature, do some special handling to - # check that the signature covers the same type as the - # other rdatas in this rdataset. If this is the first rdata - # in the set, initialize the covers field. - # - if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: - raise IncompatibleTypes - if ttl is not None: - self.update_ttl(ttl) - if self.rdtype == dns.rdatatype.RRSIG or self.rdtype == dns.rdatatype.SIG: - covers = rd.covers() - if len(self) == 0 and self.covers == dns.rdatatype.NONE: - self.covers = covers - elif self.covers != covers: - raise DifferingCovers - if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: - self.clear() - super().add(rd) - - def union_update(self, other): - self.update_ttl(other.ttl) - super().union_update(other) - - def intersection_update(self, other): - self.update_ttl(other.ttl) - super().intersection_update(other) - - def update(self, other): - """Add all rdatas in other to self. - - *other*, a ``dns.rdataset.Rdataset``, the rdataset from which - to update. - """ - - self.update_ttl(other.ttl) - super().update(other) - - def _rdata_repr(self): - def maybe_truncate(s): - if len(s) > 100: - return s[:100] + "..." - return s - - return "[" + ", ".join(f"<{maybe_truncate(str(rr))}>" for rr in self) + "]" - - def __repr__(self): - if self.covers == 0: - ctext = "" - else: - ctext = "(" + dns.rdatatype.to_text(self.covers) + ")" - return ( - "" - ) - - def __str__(self): - return self.to_text() - - def __eq__(self, other): - if not isinstance(other, Rdataset): - return False - if ( - self.rdclass != other.rdclass - or self.rdtype != other.rdtype - or self.covers != other.covers - ): - return False - return super().__eq__(other) - - def __ne__(self, other): - return not self.__eq__(other) - - def to_text( - self, - name: Optional[dns.name.Name] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - override_rdclass: Optional[dns.rdataclass.RdataClass] = None, - want_comments: bool = False, - **kw: Dict[str, Any], - ) -> str: - """Convert the rdataset into DNS zone file format. - - See ``dns.name.Name.choose_relativity`` for more information - on how *origin* and *relativize* determine the way names - are emitted. - - Any additional keyword arguments are passed on to the rdata - ``to_text()`` method. - - *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with - *name* as the owner name. - - *origin*, a ``dns.name.Name`` or ``None``, the origin for relative - names. - - *relativize*, a ``bool``. If ``True``, names will be relativized - to *origin*. - - *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``. - If not ``None``, use this class instead of the Rdataset's class. - - *want_comments*, a ``bool``. 
If ``True``, emit comments for rdata - which have them. The default is ``False``. - """ - - if name is not None: - name = name.choose_relativity(origin, relativize) - ntext = str(name) - pad = " " - else: - ntext = "" - pad = "" - s = io.StringIO() - if override_rdclass is not None: - rdclass = override_rdclass - else: - rdclass = self.rdclass - if len(self) == 0: - # - # Empty rdatasets are used for the question section, and in - # some dynamic updates, so we don't need to print out the TTL - # (which is meaningless anyway). - # - s.write( - f"{ntext}{pad}{dns.rdataclass.to_text(rdclass)} " - f"{dns.rdatatype.to_text(self.rdtype)}\n" - ) - else: - for rd in self: - extra = "" - if want_comments: - if rd.rdcomment: - extra = f" ;{rd.rdcomment}" - s.write( - "%s%s%d %s %s %s%s\n" - % ( - ntext, - pad, - self.ttl, - dns.rdataclass.to_text(rdclass), - dns.rdatatype.to_text(self.rdtype), - rd.to_text(origin=origin, relativize=relativize, **kw), - extra, - ) - ) - # - # We strip off the final \n for the caller's convenience in printing - # - return s.getvalue()[:-1] - - def to_wire( - self, - name: dns.name.Name, - file: Any, - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - override_rdclass: Optional[dns.rdataclass.RdataClass] = None, - want_shuffle: bool = True, - ) -> int: - """Convert the rdataset to wire format. - - *name*, a ``dns.name.Name`` is the owner name to use. - - *file* is the file where the name is emitted (typically a - BytesIO file). - - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - *override_rdclass*, an ``int``, is used as the class instead of the - class of the rdataset. This is useful when rendering rdatasets - associated with dynamic updates. - - *want_shuffle*, a ``bool``. If ``True``, then the order of the - Rdatas within the Rdataset will be shuffled before rendering. - - Returns an ``int``, the number of records emitted. - """ - - if override_rdclass is not None: - rdclass = override_rdclass - want_shuffle = False - else: - rdclass = self.rdclass - if len(self) == 0: - name.to_wire(file, compress, origin) - file.write(struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)) - return 1 - else: - l: Union[Rdataset, List[dns.rdata.Rdata]] - if want_shuffle: - l = list(self) - random.shuffle(l) - else: - l = self - for rd in l: - name.to_wire(file, compress, origin) - file.write(struct.pack("!HHI", self.rdtype, rdclass, self.ttl)) - with dns.renderer.prefixed_length(file, 2): - rd.to_wire(file, compress, origin) - return len(self) - - def match( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> bool: - """Returns ``True`` if this rdataset matches the specified class, - type, and covers. - """ - if self.rdclass == rdclass and self.rdtype == rdtype and self.covers == covers: - return True - return False - - def processing_order(self) -> List[dns.rdata.Rdata]: - """Return rdatas in a valid processing order according to the type's - specification. For example, MX records are in preference order from - lowest to highest preferences, with items of the same preference - shuffled. - - For types that do not define a processing order, the rdatas are - simply shuffled. 
- """ - if len(self) == 0: - return [] - else: - return self[0]._processing_order(iter(self)) - - -@dns.immutable.immutable -class ImmutableRdataset(Rdataset): # lgtm[py/missing-equals] - """An immutable DNS rdataset.""" - - _clone_class = Rdataset - - def __init__(self, rdataset: Rdataset): - """Create an immutable rdataset from the specified rdataset.""" - - super().__init__( - rdataset.rdclass, rdataset.rdtype, rdataset.covers, rdataset.ttl - ) - self.items = dns.immutable.Dict(rdataset.items) - - def update_ttl(self, ttl): - raise TypeError("immutable") - - def add(self, rd, ttl=None): - raise TypeError("immutable") - - def union_update(self, other): - raise TypeError("immutable") - - def intersection_update(self, other): - raise TypeError("immutable") - - def update(self, other): - raise TypeError("immutable") - - def __delitem__(self, i): - raise TypeError("immutable") - - # lgtm complains about these not raising ArithmeticError, but there is - # precedent for overrides of these methods in other classes to raise - # TypeError, and it seems like the better exception. - - def __ior__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __iand__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __iadd__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __isub__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def clear(self): - raise TypeError("immutable") - - def __copy__(self): - return ImmutableRdataset(super().copy()) - - def copy(self): - return ImmutableRdataset(super().copy()) - - def union(self, other): - return ImmutableRdataset(super().union(other)) - - def intersection(self, other): - return ImmutableRdataset(super().intersection(other)) - - def difference(self, other): - return ImmutableRdataset(super().difference(other)) - - def symmetric_difference(self, other): - return ImmutableRdataset(super().symmetric_difference(other)) - - -def from_text_list( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - ttl: int, - text_rdatas: Collection[str], - idna_codec: Optional[dns.name.IDNACodec] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> Rdataset: - """Create an rdataset with the specified class, type, and TTL, and with - the specified list of rdatas in text format. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Returns a ``dns.rdataset.Rdataset`` object. 
- """ - - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - r = Rdataset(rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text( - r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec - ) - r.add(rd) - return r - - -def from_text( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - ttl: int, - *text_rdatas: Any, -) -> Rdataset: - """Create an rdataset with the specified class, type, and TTL, and with - the specified rdatas in text format. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_text_list(rdclass, rdtype, ttl, cast(Collection[str], text_rdatas)) - - -def from_rdata_list(ttl: int, rdatas: Collection[dns.rdata.Rdata]) -> Rdataset: - """Create an rdataset with the specified TTL, and with - the specified list of rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = Rdataset(rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - assert r is not None - return r - - -def from_rdata(ttl: int, *rdatas: Any) -> Rdataset: - """Create an rdataset with the specified TTL, and with - the specified rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_rdata_list(ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.12/site-packages/dns/rdatatype.py b/venv/lib/python3.12/site-packages/dns/rdatatype.py deleted file mode 100644 index aa9e561c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdatatype.py +++ /dev/null @@ -1,336 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Rdata Types.""" - -from typing import Dict - -import dns.enum -import dns.exception - - -class RdataType(dns.enum.IntEnum): - """DNS Rdata Type""" - - TYPE0 = 0 - NONE = 0 - A = 1 - NS = 2 - MD = 3 - MF = 4 - CNAME = 5 - SOA = 6 - MB = 7 - MG = 8 - MR = 9 - NULL = 10 - WKS = 11 - PTR = 12 - HINFO = 13 - MINFO = 14 - MX = 15 - TXT = 16 - RP = 17 - AFSDB = 18 - X25 = 19 - ISDN = 20 - RT = 21 - NSAP = 22 - NSAP_PTR = 23 - SIG = 24 - KEY = 25 - PX = 26 - GPOS = 27 - AAAA = 28 - LOC = 29 - NXT = 30 - SRV = 33 - NAPTR = 35 - KX = 36 - CERT = 37 - A6 = 38 - DNAME = 39 - OPT = 41 - APL = 42 - DS = 43 - SSHFP = 44 - IPSECKEY = 45 - RRSIG = 46 - NSEC = 47 - DNSKEY = 48 - DHCID = 49 - NSEC3 = 50 - NSEC3PARAM = 51 - TLSA = 52 - SMIMEA = 53 - HIP = 55 - NINFO = 56 - CDS = 59 - CDNSKEY = 60 - OPENPGPKEY = 61 - CSYNC = 62 - ZONEMD = 63 - SVCB = 64 - HTTPS = 65 - SPF = 99 - UNSPEC = 103 - NID = 104 - L32 = 105 - L64 = 106 - LP = 107 - EUI48 = 108 - EUI64 = 109 - TKEY = 249 - TSIG = 250 - IXFR = 251 - AXFR = 252 - MAILB = 253 - MAILA = 254 - ANY = 255 - URI = 256 - CAA = 257 - AVC = 258 - AMTRELAY = 260 - RESINFO = 261 - WALLET = 262 - TA = 32768 - DLV = 32769 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "type" - - @classmethod - def _prefix(cls): - return "TYPE" - - @classmethod - def _extra_from_text(cls, text): - if text.find("-") >= 0: - try: - return cls[text.replace("-", "_")] - except KeyError: # pragma: no cover - pass - return _registered_by_text.get(text) - - @classmethod - def _extra_to_text(cls, value, current_text): - if current_text is None: - return _registered_by_value.get(value) - if current_text.find("_") >= 0: - return current_text.replace("_", "-") - return current_text - - @classmethod - def _unknown_exception_class(cls): - return UnknownRdatatype - - -_registered_by_text: Dict[str, RdataType] = {} -_registered_by_value: Dict[RdataType, str] = {} - -_metatypes = {RdataType.OPT} - -_singletons = { - RdataType.SOA, - RdataType.NXT, - RdataType.DNAME, - RdataType.NSEC, - RdataType.CNAME, -} - - -class UnknownRdatatype(dns.exception.DNSException): - """DNS resource record type is unknown.""" - - -def from_text(text: str) -> RdataType: - """Convert text into a DNS rdata type value. - - The input text can be a defined DNS RR type mnemonic or - instance of the DNS generic type syntax. - - For example, "NS" and "TYPE2" will both result in a value of 2. - - Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns a ``dns.rdatatype.RdataType``. - """ - - return RdataType.from_text(text) - - -def to_text(value: RdataType) -> str: - """Convert a DNS rdata type value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic type syntax will be used. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - return RdataType.to_text(value) - - -def is_metatype(rdtype: RdataType) -> bool: - """True if the specified type is a metatype. - - *rdtype* is a ``dns.rdatatype.RdataType``. - - The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, - MAILB, ANY, and OPT. - - Returns a ``bool``. - """ - - return (256 > rdtype >= 128) or rdtype in _metatypes - - -def is_singleton(rdtype: RdataType) -> bool: - """Is the specified type a singleton type? - - Singleton types can only have a single rdata in an rdataset, or a single - RR in an RRset. 
- - The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and - SOA. - - *rdtype* is an ``int``. - - Returns a ``bool``. - """ - - if rdtype in _singletons: - return True - return False - - -# pylint: disable=redefined-outer-name -def register_type( - rdtype: RdataType, rdtype_text: str, is_singleton: bool = False -) -> None: - """Dynamically register an rdatatype. - - *rdtype*, a ``dns.rdatatype.RdataType``, the rdatatype to register. - - *rdtype_text*, a ``str``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - """ - - _registered_by_text[rdtype_text] = rdtype - _registered_by_value[rdtype] = rdtype_text - if is_singleton: - _singletons.add(rdtype) - - -### BEGIN generated RdataType constants - -TYPE0 = RdataType.TYPE0 -NONE = RdataType.NONE -A = RdataType.A -NS = RdataType.NS -MD = RdataType.MD -MF = RdataType.MF -CNAME = RdataType.CNAME -SOA = RdataType.SOA -MB = RdataType.MB -MG = RdataType.MG -MR = RdataType.MR -NULL = RdataType.NULL -WKS = RdataType.WKS -PTR = RdataType.PTR -HINFO = RdataType.HINFO -MINFO = RdataType.MINFO -MX = RdataType.MX -TXT = RdataType.TXT -RP = RdataType.RP -AFSDB = RdataType.AFSDB -X25 = RdataType.X25 -ISDN = RdataType.ISDN -RT = RdataType.RT -NSAP = RdataType.NSAP -NSAP_PTR = RdataType.NSAP_PTR -SIG = RdataType.SIG -KEY = RdataType.KEY -PX = RdataType.PX -GPOS = RdataType.GPOS -AAAA = RdataType.AAAA -LOC = RdataType.LOC -NXT = RdataType.NXT -SRV = RdataType.SRV -NAPTR = RdataType.NAPTR -KX = RdataType.KX -CERT = RdataType.CERT -A6 = RdataType.A6 -DNAME = RdataType.DNAME -OPT = RdataType.OPT -APL = RdataType.APL -DS = RdataType.DS -SSHFP = RdataType.SSHFP -IPSECKEY = RdataType.IPSECKEY -RRSIG = RdataType.RRSIG -NSEC = RdataType.NSEC -DNSKEY = RdataType.DNSKEY -DHCID = RdataType.DHCID -NSEC3 = RdataType.NSEC3 -NSEC3PARAM = RdataType.NSEC3PARAM -TLSA = RdataType.TLSA -SMIMEA = RdataType.SMIMEA -HIP = RdataType.HIP -NINFO = RdataType.NINFO -CDS = RdataType.CDS -CDNSKEY = RdataType.CDNSKEY -OPENPGPKEY = RdataType.OPENPGPKEY -CSYNC = RdataType.CSYNC -ZONEMD = RdataType.ZONEMD -SVCB = RdataType.SVCB -HTTPS = RdataType.HTTPS -SPF = RdataType.SPF -UNSPEC = RdataType.UNSPEC -NID = RdataType.NID -L32 = RdataType.L32 -L64 = RdataType.L64 -LP = RdataType.LP -EUI48 = RdataType.EUI48 -EUI64 = RdataType.EUI64 -TKEY = RdataType.TKEY -TSIG = RdataType.TSIG -IXFR = RdataType.IXFR -AXFR = RdataType.AXFR -MAILB = RdataType.MAILB -MAILA = RdataType.MAILA -ANY = RdataType.ANY -URI = RdataType.URI -CAA = RdataType.CAA -AVC = RdataType.AVC -AMTRELAY = RdataType.AMTRELAY -RESINFO = RdataType.RESINFO -WALLET = RdataType.WALLET -TA = RdataType.TA -DLV = RdataType.DLV - -### END generated RdataType constants diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py deleted file mode 100644 index 06a3b970..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AFSDB.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): - """AFSDB record""" - - # Use the property mechanism to make "subtype" an alias for the - # "preference" attribute, and "hostname" an alias for the "exchange" - # attribute. - # - # This lets us inherit the UncompressedMX implementation but lets - # the caller use appropriate attribute names for the rdata type. - # - # We probably lose some performance vs. a cut-and-paste - # implementation, but this way we don't copy code, and that's - # good. - - @property - def subtype(self): - "the AFSDB subtype" - return self.preference - - @property - def hostname(self): - "the AFSDB hostname" - return self.exchange diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py deleted file mode 100644 index ed2b072b..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AMTRELAY.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
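
The AFSDB class removed above shows the alias pattern used by several rdtypes: ``subtype`` and ``hostname`` are read-only views of the ``preference`` and ``exchange`` fields inherited from the MX base class. A short sketch, with a placeholder hostname:

    import dns.rdata

    rd = dns.rdata.from_text("IN", "AFSDB", "1 db.example.")
    assert rd.subtype == rd.preference == 1
    assert rd.hostname == rd.exchange   # same dns.name.Name, two attribute names
    print(rd.to_text())                 # 1 db.example.
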
- -import struct - -import dns.exception -import dns.immutable -import dns.rdtypes.util - - -class Relay(dns.rdtypes.util.Gateway): - name = "AMTRELAY relay" - - @property - def relay(self): - return self.gateway - - -@dns.immutable.immutable -class AMTRELAY(dns.rdata.Rdata): - """AMTRELAY record""" - - # see: RFC 8777 - - __slots__ = ["precedence", "discovery_optional", "relay_type", "relay"] - - def __init__( - self, rdclass, rdtype, precedence, discovery_optional, relay_type, relay - ): - super().__init__(rdclass, rdtype) - relay = Relay(relay_type, relay) - self.precedence = self._as_uint8(precedence) - self.discovery_optional = self._as_bool(discovery_optional) - self.relay_type = relay.type - self.relay = relay.relay - - def to_text(self, origin=None, relativize=True, **kw): - relay = Relay(self.relay_type, self.relay).to_text(origin, relativize) - return "%d %d %d %s" % ( - self.precedence, - self.discovery_optional, - self.relay_type, - relay, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - precedence = tok.get_uint8() - discovery_optional = tok.get_uint8() - if discovery_optional > 1: - raise dns.exception.SyntaxError("expecting 0 or 1") - discovery_optional = bool(discovery_optional) - relay_type = tok.get_uint8() - if relay_type > 0x7F: - raise dns.exception.SyntaxError("expecting an integer <= 127") - relay = Relay.from_text(relay_type, tok, origin, relativize, relativize_to) - return cls( - rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - relay_type = self.relay_type | (self.discovery_optional << 7) - header = struct.pack("!BB", self.precedence, relay_type) - file.write(header) - Relay(self.relay_type, self.relay).to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (precedence, relay_type) = parser.get_struct("!BB") - discovery_optional = bool(relay_type >> 7) - relay_type &= 0x7F - relay = Relay.from_wire_parser(relay_type, parser, origin) - return cls( - rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay - ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py deleted file mode 100644 index a27ae2d6..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/AVC.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
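
The AMTRELAY implementation removed above packs the discovery-optional bit into the high bit of the relay-type octet on the wire. A round-trip sketch, assuming relay type 3 (the relay is a domain name) and a placeholder name:

    import dns.rdata

    rd = dns.rdata.from_text("IN", "AMTRELAY", "10 0 3 relay.example.")
    wire = rd.to_wire()
    assert wire[0] == 10     # precedence octet
    assert wire[1] == 0x03   # D bit (0) in the top bit, relay type 3 below it
    assert rd == dns.rdata.from_wire("IN", "AMTRELAY", wire, 0, len(wire))
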
- -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class AVC(dns.rdtypes.txtbase.TXTBase): - """AVC record""" - - # See: IANA dns parameters for AVC diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py deleted file mode 100644 index 2e6a7e7e..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CAA.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class CAA(dns.rdata.Rdata): - """CAA (Certification Authority Authorization) record""" - - # see: RFC 6844 - - __slots__ = ["flags", "tag", "value"] - - def __init__(self, rdclass, rdtype, flags, tag, value): - super().__init__(rdclass, rdtype) - self.flags = self._as_uint8(flags) - self.tag = self._as_bytes(tag, True, 255) - if not tag.isalnum(): - raise ValueError("tag is not alphanumeric") - self.value = self._as_bytes(value) - - def to_text(self, origin=None, relativize=True, **kw): - return '%u %s "%s"' % ( - self.flags, - dns.rdata._escapify(self.tag), - dns.rdata._escapify(self.value), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - flags = tok.get_uint8() - tag = tok.get_string().encode() - value = tok.get_string().encode() - return cls(rdclass, rdtype, flags, tag, value) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!B", self.flags)) - l = len(self.tag) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.tag) - file.write(self.value) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - flags = parser.get_uint8() - tag = parser.get_counted_bytes() - value = parser.get_remaining() - return cls(rdclass, rdtype, flags, tag, value) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py deleted file mode 100644 index b613409f..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDNSKEY.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] - -# pylint: disable=unused-import -from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] - REVOKE, - SEP, - ZONE, -) - -# pylint: enable=unused-import - - -@dns.immutable.immutable -class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - """CDNSKEY record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py deleted file mode 100644 index 8312b972..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CDS.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class CDS(dns.rdtypes.dsbase.DSBase): - """CDS record""" - - _digest_length_by_type = { - **dns.rdtypes.dsbase.DSBase._digest_length_by_type, - 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) - } diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py deleted file mode 100644 index f369cc85..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CERT.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
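
The CAA class removed above keeps ``flags`` as an integer and ``tag``/``value`` as bytes, and requires the tag to be alphanumeric. A sketch with a placeholder CA domain:

    import dns.rdata

    rd = dns.rdata.from_text("IN", "CAA", '0 issue "ca.example"')
    assert (rd.flags, rd.tag, rd.value) == (0, b"issue", b"ca.example")
    print(rd.to_text())   # 0 issue "ca.example"
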
- -import base64 -import struct - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - -_ctype_by_value = { - 1: "PKIX", - 2: "SPKI", - 3: "PGP", - 4: "IPKIX", - 5: "ISPKI", - 6: "IPGP", - 7: "ACPKIX", - 8: "IACPKIX", - 253: "URI", - 254: "OID", -} - -_ctype_by_name = { - "PKIX": 1, - "SPKI": 2, - "PGP": 3, - "IPKIX": 4, - "ISPKI": 5, - "IPGP": 6, - "ACPKIX": 7, - "IACPKIX": 8, - "URI": 253, - "OID": 254, -} - - -def _ctype_from_text(what): - v = _ctype_by_name.get(what) - if v is not None: - return v - return int(what) - - -def _ctype_to_text(what): - v = _ctype_by_value.get(what) - if v is not None: - return v - return str(what) - - -@dns.immutable.immutable -class CERT(dns.rdata.Rdata): - """CERT record""" - - # see RFC 4398 - - __slots__ = ["certificate_type", "key_tag", "algorithm", "certificate"] - - def __init__( - self, rdclass, rdtype, certificate_type, key_tag, algorithm, certificate - ): - super().__init__(rdclass, rdtype) - self.certificate_type = self._as_uint16(certificate_type) - self.key_tag = self._as_uint16(key_tag) - self.algorithm = self._as_uint8(algorithm) - self.certificate = self._as_bytes(certificate) - - def to_text(self, origin=None, relativize=True, **kw): - certificate_type = _ctype_to_text(self.certificate_type) - return "%s %d %s %s" % ( - certificate_type, - self.key_tag, - dns.dnssectypes.Algorithm.to_text(self.algorithm), - dns.rdata._base64ify(self.certificate, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - certificate_type = _ctype_from_text(tok.get_string()) - key_tag = tok.get_uint16() - algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) - b64 = tok.concatenate_remaining_identifiers().encode() - certificate = base64.b64decode(b64) - return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - prefix = struct.pack( - "!HHB", self.certificate_type, self.key_tag, self.algorithm - ) - file.write(prefix) - file.write(self.certificate) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") - certificate = parser.get_remaining() - return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py deleted file mode 100644 index 665e407c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CNAME.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class CNAME(dns.rdtypes.nsbase.NSBase): - """CNAME record - - Note: although CNAME is officially a singleton type, dnspython allows - non-singleton CNAME rdatasets because such sets have been commonly - used by BIND and other nameservers for load balancing.""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py deleted file mode 100644 index 2f972f6e..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/CSYNC.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
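
The CERT class removed above ties a certificate blob to a (type, key tag, algorithm) triple, accepting either mnemonics or numbers for the type and algorithm fields. A sketch using a placeholder base64 payload (not a real certificate):

    import dns.rdata

    rd = dns.rdata.from_text("IN", "CERT", "PKIX 12345 RSASHA256 aGVsbG8=")
    assert (rd.certificate_type, rd.key_tag, rd.algorithm) == (1, 12345, 8)
    assert rd.certificate == b"hello"   # the decoded placeholder payload
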
- -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "CSYNC" - - -@dns.immutable.immutable -class CSYNC(dns.rdata.Rdata): - """CSYNC record""" - - __slots__ = ["serial", "flags", "windows"] - - def __init__(self, rdclass, rdtype, serial, flags, windows): - super().__init__(rdclass, rdtype) - self.serial = self._as_uint32(serial) - self.flags = self._as_uint16(flags) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def to_text(self, origin=None, relativize=True, **kw): - text = Bitmap(self.windows).to_text() - return "%d %d%s" % (self.serial, self.flags, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - serial = tok.get_uint32() - flags = tok.get_uint16() - bitmap = Bitmap.from_text(tok) - return cls(rdclass, rdtype, serial, flags, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!IH", self.serial, self.flags)) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (serial, flags) = parser.get_struct("!IH") - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py deleted file mode 100644 index 6c134f18..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DLV.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class DLV(dns.rdtypes.dsbase.DSBase): - """DLV record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py deleted file mode 100644 index bbf9186c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNAME.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class DNAME(dns.rdtypes.nsbase.UncompressedNS): - """DNAME record""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py deleted file mode 100644 index 6d961a9f..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DNSKEY.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] - -# pylint: disable=unused-import -from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] - REVOKE, - SEP, - ZONE, -) - -# pylint: enable=unused-import - - -@dns.immutable.immutable -class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - """DNSKEY record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py deleted file mode 100644 index 58b3108d..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/DS.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class DS(dns.rdtypes.dsbase.DSBase): - """DS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py deleted file mode 100644 index c843be50..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI48.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.euibase - - -@dns.immutable.immutable -class EUI48(dns.rdtypes.euibase.EUIBase): - """EUI48 record""" - - # see: rfc7043.txt - - byte_len = 6 # 0123456789ab (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py deleted file mode 100644 index f6d7e257..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/EUI64.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.euibase - - -@dns.immutable.immutable -class EUI64(dns.rdtypes.euibase.EUIBase): - """EUI64 record""" - - # see: rfc7043.txt - - byte_len = 8 # 0123456789abcdef (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py deleted file mode 100644 index d79f4a06..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/GPOS.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -def _validate_float_string(what): - if len(what) == 0: - raise dns.exception.FormError - if what[0] == b"-"[0] or what[0] == b"+"[0]: - what = what[1:] - if what.isdigit(): - return - try: - (left, right) = what.split(b".") - except ValueError: - raise dns.exception.FormError - if left == b"" and right == b"": - raise dns.exception.FormError - if not left == b"" and not left.decode().isdigit(): - raise dns.exception.FormError - if not right == b"" and not right.decode().isdigit(): - raise dns.exception.FormError - - -@dns.immutable.immutable -class GPOS(dns.rdata.Rdata): - """GPOS record""" - - # see: RFC 1712 - - __slots__ = ["latitude", "longitude", "altitude"] - - def __init__(self, rdclass, rdtype, latitude, longitude, altitude): - super().__init__(rdclass, rdtype) - if isinstance(latitude, float) or isinstance(latitude, int): - latitude = str(latitude) - if isinstance(longitude, float) or isinstance(longitude, int): - longitude = str(longitude) - if isinstance(altitude, float) or isinstance(altitude, int): - altitude = str(altitude) - latitude = self._as_bytes(latitude, True, 255) - longitude = self._as_bytes(longitude, True, 255) - altitude = self._as_bytes(altitude, True, 255) - _validate_float_string(latitude) - _validate_float_string(longitude) - _validate_float_string(altitude) - self.latitude = latitude - self.longitude = longitude - self.altitude = altitude - flat = self.float_latitude - if flat < -90.0 or flat > 90.0: - raise dns.exception.FormError("bad latitude") - flong = self.float_longitude - if flong < -180.0 or flong > 180.0: - raise dns.exception.FormError("bad longitude") - - def to_text(self, origin=None, relativize=True, **kw): - return ( - f"{self.latitude.decode()} {self.longitude.decode()} " - f"{self.altitude.decode()}" - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - latitude = tok.get_string() - longitude = tok.get_string() - altitude = tok.get_string() - return cls(rdclass, rdtype, latitude, longitude, altitude) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.latitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.latitude) - l = len(self.longitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.longitude) - l = len(self.altitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.altitude) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - latitude = parser.get_counted_bytes() - longitude = parser.get_counted_bytes() - altitude = parser.get_counted_bytes() - return cls(rdclass, rdtype, latitude, 
longitude, altitude) - - @property - def float_latitude(self): - "latitude as a floating point value" - return float(self.latitude) - - @property - def float_longitude(self): - "longitude as a floating point value" - return float(self.longitude) - - @property - def float_altitude(self): - "altitude as a floating point value" - return float(self.altitude) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py deleted file mode 100644 index 06ad3487..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HINFO.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class HINFO(dns.rdata.Rdata): - """HINFO record""" - - # see: RFC 1035 - - __slots__ = ["cpu", "os"] - - def __init__(self, rdclass, rdtype, cpu, os): - super().__init__(rdclass, rdtype) - self.cpu = self._as_bytes(cpu, True, 255) - self.os = self._as_bytes(os, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - return f'"{dns.rdata._escapify(self.cpu)}" "{dns.rdata._escapify(self.os)}"' - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - cpu = tok.get_string(max_length=255) - os = tok.get_string(max_length=255) - return cls(rdclass, rdtype, cpu, os) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.cpu) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.cpu) - l = len(self.os) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.os) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - cpu = parser.get_counted_bytes() - os = parser.get_counted_bytes() - return cls(rdclass, rdtype, cpu, os) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py deleted file mode 100644 index f3157da7..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/HIP.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class HIP(dns.rdata.Rdata): - """HIP record""" - - # see: RFC 5205 - - __slots__ = ["hit", "algorithm", "key", "servers"] - - def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): - super().__init__(rdclass, rdtype) - self.hit = self._as_bytes(hit, True, 255) - self.algorithm = self._as_uint8(algorithm) - self.key = self._as_bytes(key, True) - self.servers = self._as_tuple(servers, self._as_name) - - def to_text(self, origin=None, relativize=True, **kw): - hit = binascii.hexlify(self.hit).decode() - key = base64.b64encode(self.key).replace(b"\n", b"").decode() - text = "" - servers = [] - for server in self.servers: - servers.append(server.choose_relativity(origin, relativize)) - if len(servers) > 0: - text += " " + " ".join(x.to_unicode() for x in servers) - return "%u %s %s%s" % (self.algorithm, hit, key, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - hit = binascii.unhexlify(tok.get_string().encode()) - key = base64.b64decode(tok.get_string().encode()) - servers = [] - for token in tok.get_remaining(): - server = tok.as_name(token, origin, relativize, relativize_to) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - lh = len(self.hit) - lk = len(self.key) - file.write(struct.pack("!BBH", lh, self.algorithm, lk)) - file.write(self.hit) - file.write(self.key) - for server in self.servers: - server.to_wire(file, None, origin, False) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (lh, algorithm, lk) = parser.get_struct("!BBH") - hit = parser.get_bytes(lh) - key = parser.get_bytes(lk) - servers = [] - while parser.remaining() > 0: - server = parser.get_name(origin) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py deleted file mode 100644 index 6428a0a8..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ISDN.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class ISDN(dns.rdata.Rdata): - """ISDN record""" - - # see: RFC 1183 - - __slots__ = ["address", "subaddress"] - - def __init__(self, rdclass, rdtype, address, subaddress): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address, True, 255) - self.subaddress = self._as_bytes(subaddress, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - if self.subaddress: - return ( - f'"{dns.rdata._escapify(self.address)}" ' - f'"{dns.rdata._escapify(self.subaddress)}"' - ) - else: - return f'"{dns.rdata._escapify(self.address)}"' - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - tokens = tok.get_remaining(max_tokens=1) - if len(tokens) >= 1: - subaddress = tokens[0].unescape().value - else: - subaddress = "" - return cls(rdclass, rdtype, address, subaddress) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.address) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.address) - l = len(self.subaddress) - if l > 0: - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.subaddress) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_counted_bytes() - if parser.remaining() > 0: - subaddress = parser.get_counted_bytes() - else: - subaddress = b"" - return cls(rdclass, rdtype, address, subaddress) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py deleted file mode 100644 index 09804c2d..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L32.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class L32(dns.rdata.Rdata): - """L32 record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "locator32"] - - def __init__(self, rdclass, rdtype, preference, locator32): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.locator32 = self._as_ipv4_address(locator32) - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.locator32}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - nodeid = tok.get_identifier() - return cls(rdclass, rdtype, preference, nodeid) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.ipv4.inet_aton(self.locator32)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - locator32 = parser.get_remaining() - return cls(rdclass, rdtype, preference, locator32) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py deleted file mode 
100644 index fb76808e..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/L64.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdtypes.util - - -@dns.immutable.immutable -class L64(dns.rdata.Rdata): - """L64 record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "locator64"] - - def __init__(self, rdclass, rdtype, preference, locator64): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - if isinstance(locator64, bytes): - if len(locator64) != 8: - raise ValueError("invalid locator64") - self.locator64 = dns.rdata._hexify(locator64, 4, b":") - else: - dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ":") - self.locator64 = locator64 - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.locator64}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - locator64 = tok.get_identifier() - return cls(rdclass, rdtype, preference, locator64) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, 4, 4, ":")) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - locator64 = parser.get_remaining() - return cls(rdclass, rdtype, preference, locator64) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py deleted file mode 100644 index 1153cf03..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LOC.py +++ /dev/null @@ -1,353 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.immutable -import dns.rdata - -_pows = tuple(10**i for i in range(0, 11)) - -# default values are in centimeters -_default_size = 100.0 -_default_hprec = 1000000.0 -_default_vprec = 1000.0 - -# for use by from_wire() -_MAX_LATITUDE = 0x80000000 + 90 * 3600000 -_MIN_LATITUDE = 0x80000000 - 90 * 3600000 -_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 -_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 - - -def _exponent_of(what, desc): - if what == 0: - return 0 - exp = None - for i, pow in enumerate(_pows): - if what < pow: - exp = i - 1 - break - if exp is None or exp < 0: - raise dns.exception.SyntaxError(f"{desc} value out of bounds") - return exp - - -def _float_to_tuple(what): - if what < 0: - sign = -1 - what *= -1 - else: - sign = 1 - what = round(what * 3600000) - degrees = int(what // 3600000) - what -= degrees * 3600000 - minutes = int(what // 60000) - what -= minutes * 60000 - seconds = int(what // 1000) - what -= int(seconds * 1000) - what = int(what) - return (degrees, minutes, seconds, what, sign) - - -def _tuple_to_float(what): - value = float(what[0]) - value += float(what[1]) / 60.0 - value += float(what[2]) / 3600.0 - value += float(what[3]) / 3600000.0 - return float(what[4]) * value - - -def _encode_size(what, desc): - what = int(what) - exponent = _exponent_of(what, desc) & 0xF - base = what // pow(10, exponent) & 0xF - return base * 16 + exponent - - -def _decode_size(what, desc): - exponent = what & 0x0F - if exponent > 9: - raise dns.exception.FormError(f"bad {desc} exponent") - base = (what & 0xF0) >> 4 - if base > 9: - raise dns.exception.FormError(f"bad {desc} base") - return base * pow(10, exponent) - - -def _check_coordinate_list(value, low, high): - if value[0] < low or value[0] > high: - raise ValueError(f"not in range [{low}, {high}]") - if value[1] < 0 or value[1] > 59: - raise ValueError("bad minutes value") - if value[2] < 0 or value[2] > 59: - raise ValueError("bad seconds value") - if value[3] < 0 or value[3] > 999: - raise ValueError("bad milliseconds value") - if value[4] != 1 and value[4] != -1: - raise ValueError("bad hemisphere value") - - -@dns.immutable.immutable -class LOC(dns.rdata.Rdata): - """LOC record""" - - # see: RFC 1876 - - __slots__ = [ - "latitude", - "longitude", - "altitude", - "size", - "horizontal_precision", - "vertical_precision", - ] - - def __init__( - self, - rdclass, - rdtype, - latitude, - longitude, - altitude, - size=_default_size, - hprec=_default_hprec, - vprec=_default_vprec, - ): - """Initialize a LOC record instance. - - The parameters I{latitude} and I{longitude} may be either a 4-tuple - of integers specifying (degrees, minutes, seconds, milliseconds), - or they may be floating point values specifying the number of - degrees. The other parameters are floats. 
Size, horizontal precision, - and vertical precision are specified in centimeters.""" - - super().__init__(rdclass, rdtype) - if isinstance(latitude, int): - latitude = float(latitude) - if isinstance(latitude, float): - latitude = _float_to_tuple(latitude) - _check_coordinate_list(latitude, -90, 90) - self.latitude = tuple(latitude) - if isinstance(longitude, int): - longitude = float(longitude) - if isinstance(longitude, float): - longitude = _float_to_tuple(longitude) - _check_coordinate_list(longitude, -180, 180) - self.longitude = tuple(longitude) - self.altitude = float(altitude) - self.size = float(size) - self.horizontal_precision = float(hprec) - self.vertical_precision = float(vprec) - - def to_text(self, origin=None, relativize=True, **kw): - if self.latitude[4] > 0: - lat_hemisphere = "N" - else: - lat_hemisphere = "S" - if self.longitude[4] > 0: - long_hemisphere = "E" - else: - long_hemisphere = "W" - text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( - self.latitude[0], - self.latitude[1], - self.latitude[2], - self.latitude[3], - lat_hemisphere, - self.longitude[0], - self.longitude[1], - self.longitude[2], - self.longitude[3], - long_hemisphere, - self.altitude / 100.0, - ) - - # do not print default values - if ( - self.size != _default_size - or self.horizontal_precision != _default_hprec - or self.vertical_precision != _default_vprec - ): - text += ( - f" {self.size / 100.0:0.2f}m {self.horizontal_precision / 100.0:0.2f}m" - f" {self.vertical_precision / 100.0:0.2f}m" - ) - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - latitude = [0, 0, 0, 0, 1] - longitude = [0, 0, 0, 0, 1] - size = _default_size - hprec = _default_hprec - vprec = _default_vprec - - latitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - latitude[1] = int(t) - t = tok.get_string() - if "." in t: - (seconds, milliseconds) = t.split(".") - if not seconds.isdigit(): - raise dns.exception.SyntaxError("bad latitude seconds value") - latitude[2] = int(seconds) - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError("bad latitude milliseconds value") - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - latitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - latitude[2] = int(t) - t = tok.get_string() - if t == "S": - latitude[4] = -1 - elif t != "N": - raise dns.exception.SyntaxError("bad latitude hemisphere value") - - longitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - longitude[1] = int(t) - t = tok.get_string() - if "." 
in t: - (seconds, milliseconds) = t.split(".") - if not seconds.isdigit(): - raise dns.exception.SyntaxError("bad longitude seconds value") - longitude[2] = int(seconds) - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError("bad longitude milliseconds value") - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - longitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - longitude[2] = int(t) - t = tok.get_string() - if t == "W": - longitude[4] = -1 - elif t != "E": - raise dns.exception.SyntaxError("bad longitude hemisphere value") - - t = tok.get_string() - if t[-1] == "m": - t = t[0:-1] - altitude = float(t) * 100.0 # m -> cm - - tokens = tok.get_remaining(max_tokens=3) - if len(tokens) >= 1: - value = tokens[0].unescape().value - if value[-1] == "m": - value = value[0:-1] - size = float(value) * 100.0 # m -> cm - if len(tokens) >= 2: - value = tokens[1].unescape().value - if value[-1] == "m": - value = value[0:-1] - hprec = float(value) * 100.0 # m -> cm - if len(tokens) >= 3: - value = tokens[2].unescape().value - if value[-1] == "m": - value = value[0:-1] - vprec = float(value) * 100.0 # m -> cm - - # Try encoding these now so we raise if they are bad - _encode_size(size, "size") - _encode_size(hprec, "horizontal precision") - _encode_size(vprec, "vertical precision") - - return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - milliseconds = ( - self.latitude[0] * 3600000 - + self.latitude[1] * 60000 - + self.latitude[2] * 1000 - + self.latitude[3] - ) * self.latitude[4] - latitude = 0x80000000 + milliseconds - milliseconds = ( - self.longitude[0] * 3600000 - + self.longitude[1] * 60000 - + self.longitude[2] * 1000 - + self.longitude[3] - ) * self.longitude[4] - longitude = 0x80000000 + milliseconds - altitude = int(self.altitude) + 10000000 - size = _encode_size(self.size, "size") - hprec = _encode_size(self.horizontal_precision, "horizontal precision") - vprec = _encode_size(self.vertical_precision, "vertical precision") - wire = struct.pack( - "!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude - ) - file.write(wire) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - ( - version, - size, - hprec, - vprec, - latitude, - longitude, - altitude, - ) = parser.get_struct("!BBBBIII") - if version != 0: - raise dns.exception.FormError("LOC version not zero") - if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: - raise dns.exception.FormError("bad latitude") - if latitude > 0x80000000: - latitude = (latitude - 0x80000000) / 3600000 - else: - latitude = -1 * (0x80000000 - latitude) / 3600000 - if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: - raise dns.exception.FormError("bad longitude") - if longitude > 0x80000000: - longitude = (longitude - 0x80000000) / 3600000 - else: - longitude = -1 * (0x80000000 - longitude) / 3600000 - altitude = float(altitude) - 10000000.0 - size = _decode_size(size, "size") - hprec = _decode_size(hprec, "horizontal precision") - vprec = _decode_size(vprec, "vertical precision") - return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) - - @property - def float_latitude(self): - "latitude as a floating point value" - return _tuple_to_float(self.latitude) - - @property - def float_longitude(self): - "longitude as a floating point value" - return _tuple_to_float(self.longitude) diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py deleted file mode 100644 index 312663f1..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/LP.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class LP(dns.rdata.Rdata): - """LP record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "fqdn"] - - def __init__(self, rdclass, rdtype, preference, fqdn): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.fqdn = self._as_name(fqdn) - - def to_text(self, origin=None, relativize=True, **kw): - fqdn = self.fqdn.choose_relativity(origin, relativize) - return "%d %s" % (self.preference, fqdn) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - fqdn = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, fqdn) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - self.fqdn.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - fqdn = parser.get_name(origin) - return cls(rdclass, rdtype, preference, fqdn) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py deleted file mode 100644 index 0c300c5a..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/MX.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class MX(dns.rdtypes.mxbase.MXBase): - """MX record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py deleted file mode 100644 index 2f649178..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NID.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdtypes.util - - -@dns.immutable.immutable -class NID(dns.rdata.Rdata): - """NID record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "nodeid"] - - def __init__(self, rdclass, rdtype, preference, nodeid): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - if isinstance(nodeid, bytes): - if len(nodeid) != 8: - raise ValueError("invalid nodeid") - self.nodeid = dns.rdata._hexify(nodeid, 4, b":") - else: - dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ":") - self.nodeid = nodeid - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.nodeid}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - nodeid = tok.get_identifier() - return cls(rdclass, rdtype, preference, nodeid) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ":")) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - nodeid = parser.get_remaining() - return cls(rdclass, rdtype, preference, nodeid) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py deleted file mode 100644 index b177bddb..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NINFO.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class NINFO(dns.rdtypes.txtbase.TXTBase): - """NINFO record""" - - # see: draft-reid-dnsext-zs-01 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py deleted file mode 100644 index c3f34ce9..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NS.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class NS(dns.rdtypes.nsbase.NSBase): - """NS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py deleted file mode 100644 index 3c78b722..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "NSEC" - - -@dns.immutable.immutable -class NSEC(dns.rdata.Rdata): - """NSEC record""" - - __slots__ = ["next", "windows"] - - def __init__(self, rdclass, rdtype, next, windows): - super().__init__(rdclass, rdtype) - self.next = self._as_name(next) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def to_text(self, origin=None, relativize=True, **kw): - next = self.next.choose_relativity(origin, relativize) - text = Bitmap(self.windows).to_text() - return f"{next}{text}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - next = tok.get_name(origin, relativize, relativize_to) - windows = Bitmap.from_text(tok) - return cls(rdclass, rdtype, next, windows) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - # Note that NSEC downcasing, originally mandated by RFC 4034 - # section 6.2 was removed by RFC 6840 section 5.1. 
- self.next.to_wire(file, None, origin, False) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - next = parser.get_name(origin) - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, next, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py deleted file mode 100644 index d71302b7..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - -b32_hex_to_normal = bytes.maketrans( - b"0123456789ABCDEFGHIJKLMNOPQRSTUV", b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" -) -b32_normal_to_hex = bytes.maketrans( - b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV" -) - -# hash algorithm constants -SHA1 = 1 - -# flag constants -OPTOUT = 1 - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "NSEC3" - - -@dns.immutable.immutable -class NSEC3(dns.rdata.Rdata): - """NSEC3 record""" - - __slots__ = ["algorithm", "flags", "iterations", "salt", "next", "windows"] - - def __init__( - self, rdclass, rdtype, algorithm, flags, iterations, salt, next, windows - ): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.flags = self._as_uint8(flags) - self.iterations = self._as_uint16(iterations) - self.salt = self._as_bytes(salt, True, 255) - self.next = self._as_bytes(next, True, 255) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def _next_text(self): - next = base64.b32encode(self.next).translate(b32_normal_to_hex).lower().decode() - next = next.rstrip("=") - return next - - def to_text(self, origin=None, relativize=True, **kw): - next = self._next_text() - if self.salt == b"": - salt = "-" - else: - salt = binascii.hexlify(self.salt).decode() - text = Bitmap(self.windows).to_text() - return "%u %u %u %s %s%s" % ( - self.algorithm, - self.flags, - self.iterations, - salt, - next, - text, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = tok.get_string() - if salt == "-": - salt = b"" - else: - salt = binascii.unhexlify(salt.encode("ascii")) - next = tok.get_string().encode("ascii").upper().translate(b32_hex_to_normal) - if next.endswith(b"="): - raise binascii.Error("Incorrect 
padding") - if len(next) % 8 != 0: - next += b"=" * (8 - len(next) % 8) - next = base64.b32decode(next) - bitmap = Bitmap.from_text(tok) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) - file.write(self.salt) - l = len(self.next) - file.write(struct.pack("!B", l)) - file.write(self.next) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (algorithm, flags, iterations) = parser.get_struct("!BBH") - salt = parser.get_counted_bytes() - next = parser.get_counted_bytes() - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) - - def next_name(self, origin=None): - return dns.name.from_text(self._next_text(), origin) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py deleted file mode 100644 index d1e62ebc..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class NSEC3PARAM(dns.rdata.Rdata): - """NSEC3PARAM record""" - - __slots__ = ["algorithm", "flags", "iterations", "salt"] - - def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.flags = self._as_uint8(flags) - self.iterations = self._as_uint16(iterations) - self.salt = self._as_bytes(salt, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - if self.salt == b"": - salt = "-" - else: - salt = binascii.hexlify(self.salt).decode() - return "%u %u %u %s" % (self.algorithm, self.flags, self.iterations, salt) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = tok.get_string() - if salt == "-": - salt = "" - else: - salt = binascii.unhexlify(salt.encode()) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) - file.write(self.salt) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (algorithm, flags, iterations) = parser.get_struct("!BBH") - salt = parser.get_counted_bytes() - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py deleted file mode 100644 index 4d7a4b6c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import base64 - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class OPENPGPKEY(dns.rdata.Rdata): - """OPENPGPKEY record""" - - # see: RFC 7929 - - def __init__(self, rdclass, rdtype, key): - super().__init__(rdclass, rdtype) - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.key, chunksize=None, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls(rdclass, rdtype, key) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - key = parser.get_remaining() - return cls(rdclass, rdtype, key) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py deleted file mode 100644 index d343dfa5..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/OPT.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.edns -import dns.exception -import dns.immutable -import dns.rdata - -# We don't implement from_text, and that's ok. -# pylint: disable=abstract-method - - -@dns.immutable.immutable -class OPT(dns.rdata.Rdata): - """OPT record""" - - __slots__ = ["options"] - - def __init__(self, rdclass, rdtype, options): - """Initialize an OPT rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata, - which is also the payload size. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. 
- - *options*, a tuple of ``bytes`` - """ - - super().__init__(rdclass, rdtype) - - def as_option(option): - if not isinstance(option, dns.edns.Option): - raise ValueError("option is not a dns.edns.option") - return option - - self.options = self._as_tuple(options, as_option) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for opt in self.options: - owire = opt.to_wire() - file.write(struct.pack("!HH", opt.otype, len(owire))) - file.write(owire) - - def to_text(self, origin=None, relativize=True, **kw): - return " ".join(opt.to_text() for opt in self.options) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - options = [] - while parser.remaining() > 0: - (otype, olen) = parser.get_struct("!HH") - with parser.restrict_to(olen): - opt = dns.edns.option_from_wire_parser(otype, parser) - options.append(opt) - return cls(rdclass, rdtype, options) - - @property - def payload(self): - "payload size" - return self.rdclass diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py deleted file mode 100644 index 98c36167..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/PTR.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class PTR(dns.rdtypes.nsbase.NSBase): - """PTR record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py deleted file mode 100644 index 76c8ea2a..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RESINFO.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class RESINFO(dns.rdtypes.txtbase.TXTBase): - """RESINFO record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py deleted file mode 100644 index a66cfc50..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RP.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class RP(dns.rdata.Rdata): - """RP record""" - - # see: RFC 1183 - - __slots__ = ["mbox", "txt"] - - def __init__(self, rdclass, rdtype, mbox, txt): - super().__init__(rdclass, rdtype) - self.mbox = self._as_name(mbox) - self.txt = self._as_name(txt) - - def to_text(self, origin=None, relativize=True, **kw): - mbox = self.mbox.choose_relativity(origin, relativize) - txt = self.txt.choose_relativity(origin, relativize) - return f"{str(mbox)} {str(txt)}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - mbox = tok.get_name(origin, relativize, relativize_to) - txt = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, mbox, txt) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.mbox.to_wire(file, None, origin, canonicalize) - self.txt.to_wire(file, None, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - mbox = parser.get_name(origin) - txt = parser.get_name(origin) - return cls(rdclass, rdtype, mbox, txt) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py deleted file mode 100644 index 8beb4237..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RRSIG.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import calendar -import struct -import time - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype - - -class BadSigTime(dns.exception.DNSException): - """Time in DNS SIG or RRSIG resource record cannot be parsed.""" - - -def sigtime_to_posixtime(what): - if len(what) <= 10 and what.isdigit(): - return int(what) - if len(what) != 14: - raise BadSigTime - year = int(what[0:4]) - month = int(what[4:6]) - day = int(what[6:8]) - hour = int(what[8:10]) - minute = int(what[10:12]) - second = int(what[12:14]) - return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)) - - -def posixtime_to_sigtime(what): - return time.strftime("%Y%m%d%H%M%S", time.gmtime(what)) - - -@dns.immutable.immutable -class RRSIG(dns.rdata.Rdata): - """RRSIG record""" - - __slots__ = [ - "type_covered", - "algorithm", - "labels", - "original_ttl", - "expiration", - "inception", - "key_tag", - "signer", - "signature", - ] - - def __init__( - self, - rdclass, - rdtype, - type_covered, - algorithm, - labels, - original_ttl, - expiration, - inception, - key_tag, - signer, - signature, - ): - super().__init__(rdclass, rdtype) - self.type_covered = self._as_rdatatype(type_covered) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.labels = self._as_uint8(labels) - self.original_ttl = self._as_ttl(original_ttl) - self.expiration = self._as_uint32(expiration) - self.inception = self._as_uint32(inception) - self.key_tag = self._as_uint16(key_tag) - self.signer = self._as_name(signer) - self.signature = self._as_bytes(signature) - - def covers(self): - return self.type_covered - - def to_text(self, origin=None, relativize=True, **kw): - return "%s %d %d %d %s %s %d %s %s" % ( - dns.rdatatype.to_text(self.type_covered), - self.algorithm, - self.labels, - self.original_ttl, - posixtime_to_sigtime(self.expiration), - posixtime_to_sigtime(self.inception), - self.key_tag, - self.signer.choose_relativity(origin, relativize), - dns.rdata._base64ify(self.signature, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - type_covered = dns.rdatatype.from_text(tok.get_string()) - algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) - labels = tok.get_int() - original_ttl = tok.get_ttl() - expiration = sigtime_to_posixtime(tok.get_string()) - inception = sigtime_to_posixtime(tok.get_string()) - key_tag = tok.get_int() - signer = tok.get_name(origin, relativize, relativize_to) - b64 = tok.concatenate_remaining_identifiers().encode() - signature = base64.b64decode(b64) - return cls( - rdclass, - rdtype, - type_covered, - algorithm, - labels, - original_ttl, - expiration, - inception, - key_tag, - signer, - signature, - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack( - "!HBBIIIH", - self.type_covered, - self.algorithm, - self.labels, - self.original_ttl, - self.expiration, - self.inception, - self.key_tag, - ) - file.write(header) - self.signer.to_wire(file, None, origin, canonicalize) - file.write(self.signature) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, 
parser, origin=None): - header = parser.get_struct("!HBBIIIH") - signer = parser.get_name(origin) - signature = parser.get_remaining() - return cls(rdclass, rdtype, *header, signer, signature) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py deleted file mode 100644 index 5a4d45cf..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/RT.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): - """RT record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py deleted file mode 100644 index 55d87bf8..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SMIMEA.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.tlsabase - - -@dns.immutable.immutable -class SMIMEA(dns.rdtypes.tlsabase.TLSABase): - """SMIMEA record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py deleted file mode 100644 index 09aa8321..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SOA.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class SOA(dns.rdata.Rdata): - """SOA record""" - - # see: RFC 1035 - - __slots__ = ["mname", "rname", "serial", "refresh", "retry", "expire", "minimum"] - - def __init__( - self, rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum - ): - super().__init__(rdclass, rdtype) - self.mname = self._as_name(mname) - self.rname = self._as_name(rname) - self.serial = self._as_uint32(serial) - self.refresh = self._as_ttl(refresh) - self.retry = self._as_ttl(retry) - self.expire = self._as_ttl(expire) - self.minimum = self._as_ttl(minimum) - - def to_text(self, origin=None, relativize=True, **kw): - mname = self.mname.choose_relativity(origin, relativize) - rname = self.rname.choose_relativity(origin, relativize) - return "%s %s %d %d %d %d %d" % ( - mname, - rname, - self.serial, - self.refresh, - self.retry, - self.expire, - self.minimum, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - mname = tok.get_name(origin, relativize, relativize_to) - rname = tok.get_name(origin, relativize, relativize_to) - serial = tok.get_uint32() - refresh = tok.get_ttl() - retry = tok.get_ttl() - expire = tok.get_ttl() - minimum = tok.get_ttl() - return cls( - rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.mname.to_wire(file, compress, origin, canonicalize) - self.rname.to_wire(file, compress, origin, canonicalize) - five_ints = struct.pack( - "!IIIII", self.serial, self.refresh, self.retry, self.expire, self.minimum - ) - file.write(five_ints) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - mname = parser.get_name(origin) - rname = parser.get_name(origin) - return cls(rdclass, rdtype, mname, rname, *parser.get_struct("!IIIII")) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py deleted file mode 100644 index 1df3b705..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SPF.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class SPF(dns.rdtypes.txtbase.TXTBase): - """SPF record""" - - # see: RFC 4408 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py deleted file mode 100644 index d2c4b073..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/SSHFP.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class SSHFP(dns.rdata.Rdata): - """SSHFP record""" - - # See RFC 4255 - - __slots__ = ["algorithm", "fp_type", "fingerprint"] - - def __init__(self, rdclass, rdtype, algorithm, fp_type, fingerprint): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.fp_type = self._as_uint8(fp_type) - self.fingerprint = self._as_bytes(fingerprint, True) - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %s" % ( - self.algorithm, - self.fp_type, - dns.rdata._hexify(self.fingerprint, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - fp_type = tok.get_uint8() - fingerprint = tok.concatenate_remaining_identifiers().encode() - fingerprint = binascii.unhexlify(fingerprint) - return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BB", self.algorithm, self.fp_type) - file.write(header) - file.write(self.fingerprint) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("BB") - fingerprint = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py deleted file mode 100644 index 75f62249..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TKEY.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class TKEY(dns.rdata.Rdata): - """TKEY Record""" - - __slots__ = [ - "algorithm", - "inception", - "expiration", - "mode", - "error", - "key", - "other", - ] - - def __init__( - self, - rdclass, - rdtype, - algorithm, - inception, - expiration, - mode, - error, - key, - other=b"", - ): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_name(algorithm) - self.inception = self._as_uint32(inception) - self.expiration = self._as_uint32(expiration) - self.mode = self._as_uint16(mode) - self.error = self._as_uint16(error) - self.key = self._as_bytes(key) - self.other = self._as_bytes(other) - - def to_text(self, origin=None, relativize=True, **kw): - _algorithm = self.algorithm.choose_relativity(origin, relativize) - text = "%s %u %u %u %u %s" % ( - str(_algorithm), - self.inception, - self.expiration, - self.mode, - self.error, - dns.rdata._base64ify(self.key, 0), - ) - if len(self.other) > 0: - text += f" {dns.rdata._base64ify(self.other, 0)}" - - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_name(relativize=False) - inception = tok.get_uint32() - expiration = tok.get_uint32() - mode = tok.get_uint16() - error = tok.get_uint16() - key_b64 = tok.get_string().encode() - key = base64.b64decode(key_b64) - other_b64 = tok.concatenate_remaining_identifiers(True).encode() - other = base64.b64decode(other_b64) - - return cls( - rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.algorithm.to_wire(file, compress, origin) - file.write( - struct.pack("!IIHH", self.inception, self.expiration, self.mode, self.error) - ) - file.write(struct.pack("!H", len(self.key))) - file.write(self.key) - file.write(struct.pack("!H", len(self.other))) - if len(self.other) > 0: - file.write(self.other) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - algorithm = parser.get_name(origin) - inception, expiration, mode, error = parser.get_struct("!IIHH") - key = parser.get_counted_bytes(2) - other = parser.get_counted_bytes(2) - - return cls( - rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other - ) - - # Constants for the mode field - from RFC 2930: - # 2.5 The Mode Field - # - # The mode field specifies the general scheme for key agreement or - # the purpose of the TKEY DNS message. Servers and resolvers - # supporting this specification MUST implement the Diffie-Hellman key - # agreement mode and the key deletion mode for queries. All other - # modes are OPTIONAL. A server supporting TKEY that receives a TKEY - # request with a mode it does not support returns the BADMODE error. 
- # The following values of the Mode octet are defined, available, or - # reserved: - # - # Value Description - # ----- ----------- - # 0 - reserved, see section 7 - # 1 server assignment - # 2 Diffie-Hellman exchange - # 3 GSS-API negotiation - # 4 resolver assignment - # 5 key deletion - # 6-65534 - available, see section 7 - # 65535 - reserved, see section 7 - SERVER_ASSIGNMENT = 1 - DIFFIE_HELLMAN_EXCHANGE = 2 - GSSAPI_NEGOTIATION = 3 - RESOLVER_ASSIGNMENT = 4 - KEY_DELETION = 5 diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py deleted file mode 100644 index 4dffc553..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TLSA.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.tlsabase - - -@dns.immutable.immutable -class TLSA(dns.rdtypes.tlsabase.TLSABase): - """TLSA record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py deleted file mode 100644 index 79423826..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TSIG.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rcode -import dns.rdata - - -@dns.immutable.immutable -class TSIG(dns.rdata.Rdata): - """TSIG record""" - - __slots__ = [ - "algorithm", - "time_signed", - "fudge", - "mac", - "original_id", - "error", - "other", - ] - - def __init__( - self, - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ): - """Initialize a TSIG rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - - *algorithm*, a ``dns.name.Name``. - - *time_signed*, an ``int``. - - *fudge*, an ``int``.
- - *mac*, a ``bytes`` - - *original_id*, an ``int`` - - *error*, an ``int`` - - *other*, a ``bytes`` - """ - - super().__init__(rdclass, rdtype) - self.algorithm = self._as_name(algorithm) - self.time_signed = self._as_uint48(time_signed) - self.fudge = self._as_uint16(fudge) - self.mac = self._as_bytes(mac) - self.original_id = self._as_uint16(original_id) - self.error = dns.rcode.Rcode.make(error) - self.other = self._as_bytes(other) - - def to_text(self, origin=None, relativize=True, **kw): - algorithm = self.algorithm.choose_relativity(origin, relativize) - error = dns.rcode.to_text(self.error, True) - text = ( - f"{algorithm} {self.time_signed} {self.fudge} " - + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " - + f"{self.original_id} {error} {len(self.other)}" - ) - if self.other: - text += f" {dns.rdata._base64ify(self.other, 0)}" - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_name(relativize=False) - time_signed = tok.get_uint48() - fudge = tok.get_uint16() - mac_len = tok.get_uint16() - mac = base64.b64decode(tok.get_string()) - if len(mac) != mac_len: - raise SyntaxError("invalid MAC") - original_id = tok.get_uint16() - error = dns.rcode.from_text(tok.get_string()) - other_len = tok.get_uint16() - if other_len > 0: - other = base64.b64decode(tok.get_string()) - if len(other) != other_len: - raise SyntaxError("invalid other data") - else: - other = b"" - return cls( - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.algorithm.to_wire(file, None, origin, False) - file.write( - struct.pack( - "!HIHH", - (self.time_signed >> 32) & 0xFFFF, - self.time_signed & 0xFFFFFFFF, - self.fudge, - len(self.mac), - ) - ) - file.write(self.mac) - file.write(struct.pack("!HHH", self.original_id, self.error, len(self.other))) - file.write(self.other) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - algorithm = parser.get_name() - time_signed = parser.get_uint48() - fudge = parser.get_uint16() - mac = parser.get_counted_bytes(2) - (original_id, error) = parser.get_struct("!HH") - other = parser.get_counted_bytes(2) - return cls( - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py deleted file mode 100644 index 6d4dae27..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/TXT.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class TXT(dns.rdtypes.txtbase.TXTBase): - """TXT record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py deleted file mode 100644 index 2efbb305..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/URI.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# Copyright (C) 2015 Red Hat, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class URI(dns.rdata.Rdata): - """URI record""" - - # see RFC 7553 - - __slots__ = ["priority", "weight", "target"] - - def __init__(self, rdclass, rdtype, priority, weight, target): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.weight = self._as_uint16(weight) - self.target = self._as_bytes(target, True) - if len(self.target) == 0: - raise dns.exception.SyntaxError("URI target cannot be empty") - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d "%s"' % (self.priority, self.weight, self.target.decode()) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - weight = tok.get_uint16() - target = tok.get().unescape() - if not (target.is_quoted_string() or target.is_identifier()): - raise dns.exception.SyntaxError("URI target must be a string") - return cls(rdclass, rdtype, priority, weight, target.value) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - two_ints = struct.pack("!HH", self.priority, self.weight) - file.write(two_ints) - file.write(self.target) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (priority, weight) = parser.get_struct("!HH") - target = parser.get_remaining() - if len(target) == 0: - raise dns.exception.FormError("URI target may not be empty") - return cls(rdclass, rdtype, priority, weight, target) - - def _processing_priority(self): - return self.priority - - def _processing_weight(self): - return self.weight - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.weighted_processing_order(iterable) diff --git 
a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py deleted file mode 100644 index ff464763..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/WALLET.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class WALLET(dns.rdtypes.txtbase.TXTBase): - """WALLET record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py deleted file mode 100644 index 2436ddb6..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/X25.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class X25(dns.rdata.Rdata): - """X25 record""" - - # see RFC 1183 - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - return f'"{dns.rdata._escapify(self.address)}"' - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.address) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.address) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_counted_bytes() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py deleted file mode 100644 index c90e3ee1..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/ZONEMD.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype -import dns.zonetypes - - -@dns.immutable.immutable -class ZONEMD(dns.rdata.Rdata): - """ZONEMD record""" - - # See RFC 8976 - - __slots__ = ["serial", "scheme", "hash_algorithm", "digest"] - - def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): - super().__init__(rdclass, rdtype) - self.serial = self._as_uint32(serial) - self.scheme = dns.zonetypes.DigestScheme.make(scheme) - self.hash_algorithm = 
dns.zonetypes.DigestHashAlgorithm.make(hash_algorithm) - self.digest = self._as_bytes(digest) - - if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 - raise ValueError("scheme 0 is reserved") - if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 5.3 - raise ValueError("hash_algorithm 0 is reserved") - - hasher = dns.zonetypes._digest_hashers.get(self.hash_algorithm) - if hasher and hasher().digest_size != len(self.digest): - raise ValueError("digest length inconsistent with hash algorithm") - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.serial, - self.scheme, - self.hash_algorithm, - dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - serial = tok.get_uint32() - scheme = tok.get_uint8() - hash_algorithm = tok.get_uint8() - digest = tok.concatenate_remaining_identifiers().encode() - digest = binascii.unhexlify(digest) - return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!IBB", self.serial, self.scheme, self.hash_algorithm) - file.write(header) - file.write(self.digest) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!IBB") - digest = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py deleted file mode 100644 index 647b215b..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""Class ANY (generic) rdata type classes.""" - -__all__ = [ - "AFSDB", - "AMTRELAY", - "AVC", - "CAA", - "CDNSKEY", - "CDS", - "CERT", - "CNAME", - "CSYNC", - "DLV", - "DNAME", - "DNSKEY", - "DS", - "EUI48", - "EUI64", - "GPOS", - "HINFO", - "HIP", - "ISDN", - "L32", - "L64", - "LOC", - "LP", - "MX", - "NID", - "NINFO", - "NS", - "NSEC", - "NSEC3", - "NSEC3PARAM", - "OPENPGPKEY", - "OPT", - "PTR", - "RESINFO", - "RP", - "RRSIG", - "RT", - "SMIMEA", - "SOA", - "SPF", - "SSHFP", - "TKEY", - "TLSA", - "TSIG", - "TXT", - "URI", - "WALLET", - "X25", - "ZONEMD", -] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc deleted file mode 100644 index 0ff07978..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc deleted file mode 100644 index d46cdca5..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc deleted file mode 100644 index 35d20793..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc deleted file mode 100644 index ae6005fa..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc deleted file mode 100644 index 0105e265..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc deleted file mode 100644 index 1a031740..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc deleted file mode 100644 index 4c68b8af..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc deleted file mode 100644 index 837c7c95..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc 
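Likewise, helpers defined in the deleted modules remain importable from the installed package. The RRSIG module's sigtime converters, shown verbatim in the deleted source earlier, translate between the YYYYMMDDHHMMSS presentation form and the 32-bit POSIX timestamps used on the wire. A small usage sketch (the example timestamp is 2024-01-01 00:00:00 UTC):

from dns.rdtypes.ANY.RRSIG import posixtime_to_sigtime, sigtime_to_posixtime

# Presentation form -> POSIX seconds (UTC).
t = sigtime_to_posixtime("20240101000000")
print(t)  # 1704067200

# And back again.
print(posixtime_to_sigtime(t))  # 20240101000000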
diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py deleted file mode 100644 index 832e8d3a..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/A.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class A(dns.rdata.Rdata): - """A record for Chaosnet""" - - # domain: the domain of the address - # address: the 16-bit address - - __slots__ = ["domain", "address"] - - def __init__(self, rdclass, rdtype, domain, address): - super().__init__(rdclass, rdtype) - self.domain = self._as_name(domain) - self.address = self._as_uint16(address) - - def to_text(self, origin=None, relativize=True, **kw): - domain = self.domain.choose_relativity(origin, relativize) - return f"{domain} {self.address:o}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - domain = tok.get_name(origin, relativize, relativize_to) - address = tok.get_uint16(base=8) - return cls(rdclass, rdtype, domain, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.domain.to_wire(file, compress, origin, canonicalize) - pref = struct.pack("!H", self.address) - file.write(pref) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - domain = parser.get_name(origin) - address = parser.get_uint16() - return cls(rdclass, rdtype, domain, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py deleted file mode 100644 index 0760c26c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/CH/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class CH rdata type classes.""" - -__all__ = [ - "A", -] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py deleted file mode 100644 index e09d6110..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/A.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class A(dns.rdata.Rdata): - """A record.""" - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv4_address(address) - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_identifier() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv4.inet_aton(self.address)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py deleted file mode 100644 index 0cd139e7..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/AAAA.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.immutable -import dns.ipv6 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class AAAA(dns.rdata.Rdata): - """AAAA record.""" - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv6_address(address) - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_identifier() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv6.inet_aton(self.address)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py deleted file mode 100644 index 44cb3fef..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/APL.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import codecs -import struct - -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class APLItem: - """An APL list item.""" - - __slots__ = ["family", "negation", "address", "prefix"] - - def __init__(self, family, negation, address, prefix): - self.family = dns.rdata.Rdata._as_uint16(family) - self.negation = dns.rdata.Rdata._as_bool(negation) - if self.family == 1: - self.address = dns.rdata.Rdata._as_ipv4_address(address) - self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) - elif self.family == 2: - self.address = dns.rdata.Rdata._as_ipv6_address(address) - self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) - else: - self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) - self.prefix = dns.rdata.Rdata._as_uint8(prefix) - - def __str__(self): - if self.negation: - return "!%d:%s/%s" % (self.family, self.address, self.prefix) - else: - return "%d:%s/%s" % (self.family, self.address, self.prefix) - - def to_wire(self, file): - if self.family == 1: - address = dns.ipv4.inet_aton(self.address) - elif self.family == 2: - address = dns.ipv6.inet_aton(self.address) - else: - address = binascii.unhexlify(self.address) - # - # Truncate least significant zero bytes. 
- # - last = 0 - for i in range(len(address) - 1, -1, -1): - if address[i] != 0: - last = i + 1 - break - address = address[0:last] - l = len(address) - assert l < 128 - if self.negation: - l |= 0x80 - header = struct.pack("!HBB", self.family, self.prefix, l) - file.write(header) - file.write(address) - - -@dns.immutable.immutable -class APL(dns.rdata.Rdata): - """APL record.""" - - # see: RFC 3123 - - __slots__ = ["items"] - - def __init__(self, rdclass, rdtype, items): - super().__init__(rdclass, rdtype) - for item in items: - if not isinstance(item, APLItem): - raise ValueError("item not an APLItem") - self.items = tuple(items) - - def to_text(self, origin=None, relativize=True, **kw): - return " ".join(map(str, self.items)) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - items = [] - for token in tok.get_remaining(): - item = token.unescape().value - if item[0] == "!": - negation = True - item = item[1:] - else: - negation = False - (family, rest) = item.split(":", 1) - family = int(family) - (address, prefix) = rest.split("/", 1) - prefix = int(prefix) - item = APLItem(family, negation, address, prefix) - items.append(item) - - return cls(rdclass, rdtype, items) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for item in self.items: - item.to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - items = [] - while parser.remaining() > 0: - header = parser.get_struct("!HBB") - afdlen = header[2] - if afdlen > 127: - negation = True - afdlen -= 128 - else: - negation = False - address = parser.get_bytes(afdlen) - l = len(address) - if header[0] == 1: - if l < 4: - address += b"\x00" * (4 - l) - elif header[0] == 2: - if l < 16: - address += b"\x00" * (16 - l) - else: - # - # This isn't really right according to the RFC, but it - # seems better than throwing an exception - # - address = codecs.encode(address, "hex_codec") - item = APLItem(header[0], negation, address, header[1]) - items.append(item) - return cls(rdclass, rdtype, items) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py deleted file mode 100644 index 723492fa..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/DHCID.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
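# The APL hunk above (RFC 3123) packs each item as !HBB plus a
# zero-truncated address, with bit 0x80 of the length octet carrying the
# negation flag. A sketch of the same parse/serialize cycle, assuming
# dnspython >= 2.x is installed:
import dns.rdata

apl = dns.rdata.from_text("IN", "APL", "1:192.168.32.0/21 !1:192.168.38.0/28")
print(apl.items[0].family, apl.items[0].prefix)   # 1 21
print(apl.items[1].negation)                      # True
wire = apl.to_wire()                              # trailing zero bytes truncated
print(dns.rdata.from_wire("IN", "APL", wire, 0, len(wire)) == apl)  # True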
- -import base64 - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class DHCID(dns.rdata.Rdata): - """DHCID record""" - - # see: RFC 4701 - - __slots__ = ["data"] - - def __init__(self, rdclass, rdtype, data): - super().__init__(rdclass, rdtype) - self.data = self._as_bytes(data) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.data, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - b64 = tok.concatenate_remaining_identifiers().encode() - data = base64.b64decode(b64) - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.data) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - data = parser.get_remaining() - return cls(rdclass, rdtype, data) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py deleted file mode 100644 index 15464cbd..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/HTTPS.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.svcbbase - - -@dns.immutable.immutable -class HTTPS(dns.rdtypes.svcbbase.SVCBBase): - """HTTPS record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py deleted file mode 100644 index e3a66157..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/IPSECKEY.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
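# DHCID (RFC 4701) above is a thin wrapper over opaque base64 data, and
# HTTPS above is a one-line subclass of the SVCB base removed later in this
# diff. A sketch with an opaque base64 payload (any valid base64 works;
# dnspython >= 2.x assumed):
import dns.rdata

dhcid = dns.rdata.from_text(
    "IN", "DHCID", "AAIBY2/AuCccgoJbsaxcQc9TUapptP69lOjxfNuVAA2kjEA="
)
print(len(dhcid.data))    # 35 decoded bytes, written verbatim by _to_wire()
print(dhcid.to_text())    # re-encoded by dns.rdata._base64ify()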
- -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rdtypes.util - - -class Gateway(dns.rdtypes.util.Gateway): - name = "IPSECKEY gateway" - - -@dns.immutable.immutable -class IPSECKEY(dns.rdata.Rdata): - """IPSECKEY record""" - - # see: RFC 4025 - - __slots__ = ["precedence", "gateway_type", "algorithm", "gateway", "key"] - - def __init__( - self, rdclass, rdtype, precedence, gateway_type, algorithm, gateway, key - ): - super().__init__(rdclass, rdtype) - gateway = Gateway(gateway_type, gateway) - self.precedence = self._as_uint8(precedence) - self.gateway_type = gateway.type - self.algorithm = self._as_uint8(algorithm) - self.gateway = gateway.gateway - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, relativize) - return "%d %d %d %s %s" % ( - self.precedence, - self.gateway_type, - self.algorithm, - gateway, - dns.rdata._base64ify(self.key, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - precedence = tok.get_uint8() - gateway_type = tok.get_uint8() - algorithm = tok.get_uint8() - gateway = Gateway.from_text( - gateway_type, tok, origin, relativize, relativize_to - ) - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls( - rdclass, rdtype, precedence, gateway_type, algorithm, gateway.gateway, key - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BBB", self.precedence, self.gateway_type, self.algorithm) - file.write(header) - Gateway(self.gateway_type, self.gateway).to_wire( - file, compress, origin, canonicalize - ) - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!BBB") - gateway_type = header[1] - gateway = Gateway.from_wire_parser(gateway_type, parser, origin) - key = parser.get_remaining() - return cls( - rdclass, rdtype, header[0], gateway_type, header[2], gateway.gateway, key - ) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py deleted file mode 100644 index 6073df47..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/KX.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
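# IPSECKEY above (RFC 4025) packs a !BBB header (precedence, gateway type,
# algorithm), a gateway whose wire form depends on the type, then raw key
# bytes. A sketch in the style of the RFC's example record (gateway type 1
# = IPv4; dnspython >= 2.x assumed):
import dns.rdata

ipseckey = dns.rdata.from_text(
    "IN",
    "IPSECKEY",
    "10 1 2 192.0.2.38 AQNRU3mG7TVTO2BkR47usntb102uFJtugbo6BSGvgqt4AQ==",
)
print(ipseckey.precedence, ipseckey.gateway_type, ipseckey.algorithm)  # 10 1 2
print(ipseckey.gateway)   # 192.0.2.38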
- -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): - """KX record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py deleted file mode 100644 index 195d1cba..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NAPTR.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -def _write_string(file, s): - l = len(s) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(s) - - -@dns.immutable.immutable -class NAPTR(dns.rdata.Rdata): - """NAPTR record""" - - # see: RFC 3403 - - __slots__ = ["order", "preference", "flags", "service", "regexp", "replacement"] - - def __init__( - self, rdclass, rdtype, order, preference, flags, service, regexp, replacement - ): - super().__init__(rdclass, rdtype) - self.flags = self._as_bytes(flags, True, 255) - self.service = self._as_bytes(service, True, 255) - self.regexp = self._as_bytes(regexp, True, 255) - self.order = self._as_uint16(order) - self.preference = self._as_uint16(preference) - self.replacement = self._as_name(replacement) - - def to_text(self, origin=None, relativize=True, **kw): - replacement = self.replacement.choose_relativity(origin, relativize) - return '%d %d "%s" "%s" "%s" %s' % ( - self.order, - self.preference, - dns.rdata._escapify(self.flags), - dns.rdata._escapify(self.service), - dns.rdata._escapify(self.regexp), - replacement, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - order = tok.get_uint16() - preference = tok.get_uint16() - flags = tok.get_string() - service = tok.get_string() - regexp = tok.get_string() - replacement = tok.get_name(origin, relativize, relativize_to) - return cls( - rdclass, rdtype, order, preference, flags, service, regexp, replacement - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - two_ints = struct.pack("!HH", self.order, self.preference) - file.write(two_ints) - _write_string(file, self.flags) - _write_string(file, self.service) - _write_string(file, self.regexp) - self.replacement.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (order, preference) = parser.get_struct("!HH") - strings = [] - for _ in range(3): - s = parser.get_counted_bytes() - strings.append(s) - replacement = parser.get_name(origin) - return cls( - rdclass, - rdtype, - order, - preference, 
- strings[0], - strings[1], - strings[2], - replacement, - ) - - def _processing_priority(self): - return (self.order, self.preference) - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py deleted file mode 100644 index d55edb73..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class NSAP(dns.rdata.Rdata): - """NSAP record.""" - - # see: RFC 1706 - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address) - - def to_text(self, origin=None, relativize=True, **kw): - return f"0x{binascii.hexlify(self.address).decode()}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - if address[0:2] != "0x": - raise dns.exception.SyntaxError("string does not start with 0x") - address = address[2:].replace(".", "") - if len(address) % 2 != 0: - raise dns.exception.SyntaxError("hexstring has odd length") - address = binascii.unhexlify(address.encode()) - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.address) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py deleted file mode 100644 index ce1c6632..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/NSAP_PTR.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): - """NSAP-PTR record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py deleted file mode 100644 index cdca1532..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/PX.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class PX(dns.rdata.Rdata): - """PX record.""" - - # see: RFC 2163 - - __slots__ = ["preference", "map822", "mapx400"] - - def __init__(self, rdclass, rdtype, preference, map822, mapx400): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.map822 = self._as_name(map822) - self.mapx400 = self._as_name(mapx400) - - def to_text(self, origin=None, relativize=True, **kw): - map822 = self.map822.choose_relativity(origin, relativize) - mapx400 = self.mapx400.choose_relativity(origin, relativize) - return "%d %s %s" % (self.preference, map822, mapx400) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - map822 = tok.get_name(origin, relativize, relativize_to) - mapx400 = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, map822, mapx400) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.map822.to_wire(file, None, origin, canonicalize) - self.mapx400.to_wire(file, None, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - map822 = parser.get_name(origin) - mapx400 = parser.get_name(origin) - return cls(rdclass, rdtype, preference, map822, mapx400) - - def _processing_priority(self): - return self.preference - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py deleted file mode 100644 index 
5adef98f..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SRV.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class SRV(dns.rdata.Rdata): - """SRV record""" - - # see: RFC 2782 - - __slots__ = ["priority", "weight", "port", "target"] - - def __init__(self, rdclass, rdtype, priority, weight, port, target): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.weight = self._as_uint16(weight) - self.port = self._as_uint16(port) - self.target = self._as_name(target) - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return "%d %d %d %s" % (self.priority, self.weight, self.port, target) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - weight = tok.get_uint16() - port = tok.get_uint16() - target = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, priority, weight, port, target) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) - file.write(three_ints) - self.target.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (priority, weight, port) = parser.get_struct("!HHH") - target = parser.get_name(origin) - return cls(rdclass, rdtype, priority, weight, port, target) - - def _processing_priority(self): - return self.priority - - def _processing_weight(self): - return self.weight - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py deleted file mode 100644 index ff3e9327..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/SVCB.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.svcbbase - - -@dns.immutable.immutable -class SVCB(dns.rdtypes.svcbbase.SVCBBase): - """SVCB record""" diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py deleted file mode 100644 index 881a7849..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/WKS.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (C) Dnspython 
Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import socket -import struct - -import dns.immutable -import dns.ipv4 -import dns.rdata - -try: - _proto_tcp = socket.getprotobyname("tcp") - _proto_udp = socket.getprotobyname("udp") -except OSError: - # Fall back to defaults in case /etc/protocols is unavailable. - _proto_tcp = 6 - _proto_udp = 17 - - -@dns.immutable.immutable -class WKS(dns.rdata.Rdata): - """WKS record""" - - # see: RFC 1035 - - __slots__ = ["address", "protocol", "bitmap"] - - def __init__(self, rdclass, rdtype, address, protocol, bitmap): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv4_address(address) - self.protocol = self._as_uint8(protocol) - self.bitmap = self._as_bytes(bitmap) - - def to_text(self, origin=None, relativize=True, **kw): - bits = [] - for i, byte in enumerate(self.bitmap): - for j in range(0, 8): - if byte & (0x80 >> j): - bits.append(str(i * 8 + j)) - text = " ".join(bits) - return "%s %d %s" % (self.address, self.protocol, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - protocol = tok.get_string() - if protocol.isdigit(): - protocol = int(protocol) - else: - protocol = socket.getprotobyname(protocol) - bitmap = bytearray() - for token in tok.get_remaining(): - value = token.unescape().value - if value.isdigit(): - serv = int(value) - else: - if protocol != _proto_udp and protocol != _proto_tcp: - raise NotImplementedError("protocol must be TCP or UDP") - if protocol == _proto_udp: - protocol_text = "udp" - else: - protocol_text = "tcp" - serv = socket.getservbyname(value, protocol_text) - i = serv // 8 - l = len(bitmap) - if l < i + 1: - for _ in range(l, i + 1): - bitmap.append(0) - bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) - bitmap = dns.rdata._truncate_bitmap(bitmap) - return cls(rdclass, rdtype, address, protocol, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv4.inet_aton(self.address)) - protocol = struct.pack("!B", self.protocol) - file.write(protocol) - file.write(self.bitmap) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_bytes(4) - protocol = parser.get_uint8() - bitmap = parser.get_remaining() - return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py deleted file mode 100644 index dcec4dd2..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license 
- -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class IN rdata type classes.""" - -__all__ = [ - "A", - "AAAA", - "APL", - "DHCID", - "HTTPS", - "IPSECKEY", - "KX", - "NAPTR", - "NSAP", - "NSAP_PTR", - "PX", - "SRV", - "SVCB", - "WKS", -] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc deleted file mode 100644 index 3c37e20d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc deleted file mode 100644 index c7f9dafd..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc deleted file mode 100644 index d681bbf6..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc deleted file mode 100644 index dc65019d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc deleted file mode 100644 index 64c91d1d..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc deleted file mode 100644 index 61f02d58..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc deleted file mode 100644 index 78a9df4e..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc deleted file mode 100644 index ac5c552b..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc deleted file mode 100644 index 0f34d75c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc deleted file mode 100644 index a69ff8b1..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc deleted file mode 100644 index 75d93d46..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc deleted file mode 100644 index db45ec22..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc deleted file mode 100644 index e5bab26a..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc deleted file mode 100644 index b87e8a98..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 7989cc91..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py b/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py deleted file mode 100644 index 3997f84c..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata type classes""" - -__all__ = [ - "ANY", - "IN", - "CH", - "dnskeybase", - "dsbase", - "euibase", - "mxbase", - "nsbase", - "svcbbase", - "tlsabase", - "txtbase", - "util", -] diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 55b9c1e5..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc deleted file mode 100644 index 205e67f7..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc deleted file mode 100644 index f609151c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc deleted file mode 100644 index a87f9e85..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/euibase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc deleted file mode 100644 index 44989bd9..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc deleted file mode 100644 index e506c40c..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc deleted file mode 100644 index cab7d7ef..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc deleted file mode 100644 index 26f8cedf..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc deleted file mode 100644 index 66f3b82b..00000000 Binary files 
a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc deleted file mode 100644 index c92712e1..00000000 Binary files a/venv/lib/python3.12/site-packages/dns/rdtypes/__pycache__/util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py deleted file mode 100644 index db300f8b..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/dnskeybase.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import enum -import struct - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata - -# wildcard import -__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 - - -class Flag(enum.IntFlag): - SEP = 0x0001 - REVOKE = 0x0080 - ZONE = 0x0100 - - -@dns.immutable.immutable -class DNSKEYBase(dns.rdata.Rdata): - """Base class for rdata that is like a DNSKEY record""" - - __slots__ = ["flags", "protocol", "algorithm", "key"] - - def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): - super().__init__(rdclass, rdtype) - self.flags = Flag(self._as_uint16(flags)) - self.protocol = self._as_uint8(protocol) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - return "%d %d %d %s" % ( - self.flags, - self.protocol, - self.algorithm, - dns.rdata._base64ify(self.key, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - flags = tok.get_uint16() - protocol = tok.get_uint8() - algorithm = tok.get_string() - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls(rdclass, rdtype, flags, protocol, algorithm, key) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) - file.write(header) - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!HBB") - key = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], key) - - -### BEGIN generated Flag constants - -SEP = Flag.SEP -REVOKE = Flag.REVOKE -ZONE = Flag.ZONE - -### END generated Flag constants diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py 
b/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py deleted file mode 100644 index cd21f026..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/dsbase.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.dnssectypes -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class DSBase(dns.rdata.Rdata): - """Base class for rdata that is like a DS record""" - - __slots__ = ["key_tag", "algorithm", "digest_type", "digest"] - - # Digest types registry: - # https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml - _digest_length_by_type = { - 1: 20, # SHA-1, RFC 3658 Sec. 2.4 - 2: 32, # SHA-256, RFC 4509 Sec. 2.2 - 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 2.1 - 4: 48, # SHA-384, RFC 6605 Sec. 2 - } - - def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, digest): - super().__init__(rdclass, rdtype) - self.key_tag = self._as_uint16(key_tag) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.digest_type = dns.dnssectypes.DSDigest.make(self._as_uint8(digest_type)) - self.digest = self._as_bytes(digest) - try: - if len(self.digest) != self._digest_length_by_type[self.digest_type]: - raise ValueError("digest length inconsistent with digest type") - except KeyError: - if self.digest_type == 0: # reserved, RFC 3658 Sec. 
2.4 - raise ValueError("digest type 0 is reserved") - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.key_tag, - self.algorithm, - self.digest_type, - dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - key_tag = tok.get_uint16() - algorithm = tok.get_string() - digest_type = tok.get_uint8() - digest = tok.concatenate_remaining_identifiers().encode() - digest = binascii.unhexlify(digest) - return cls(rdclass, rdtype, key_tag, algorithm, digest_type, digest) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!HBB", self.key_tag, self.algorithm, self.digest_type) - file.write(header) - file.write(self.digest) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!HBB") - digest = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py deleted file mode 100644 index a39c166b..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/euibase.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
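# DSBase above cross-checks digest length against digest type (SHA-1 = 20
# bytes, SHA-256 = 32, SHA-384 = 48) and rejects reserved type 0. A sketch
# with an RFC 4034-style DS record (dnspython >= 2.x assumed):
import dns.rdata

ds = dns.rdata.from_text(
    "IN", "DS", "60485 5 1 2BB183AF5F22588179A53B0A98631FAD1A292118"
)
print(ds.key_tag, int(ds.algorithm), int(ds.digest_type), len(ds.digest))
# 60485 5 1 20 -- a digest of any other length for type 1 raises ValueError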
- -import binascii - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class EUIBase(dns.rdata.Rdata): - """EUIxx record""" - - # see: rfc7043.txt - - __slots__ = ["eui"] - # define these in subclasses - # byte_len = 6 # 0123456789ab (in hex) - # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab - - def __init__(self, rdclass, rdtype, eui): - super().__init__(rdclass, rdtype) - self.eui = self._as_bytes(eui) - if len(self.eui) != self.byte_len: - raise dns.exception.FormError( - f"EUI{self.byte_len * 8} rdata has to have {self.byte_len} bytes" - ) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._hexify(self.eui, chunksize=2, separator=b"-", **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - text = tok.get_string() - if len(text) != cls.text_len: - raise dns.exception.SyntaxError( - f"Input text must have {cls.text_len} characters" - ) - for i in range(2, cls.byte_len * 3 - 1, 3): - if text[i] != "-": - raise dns.exception.SyntaxError(f"Dash expected at position {i}") - text = text.replace("-", "") - try: - data = binascii.unhexlify(text.encode()) - except (ValueError, TypeError) as ex: - raise dns.exception.SyntaxError(f"Hex decoding error: {str(ex)}") - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.eui) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - eui = parser.get_bytes(cls.byte_len) - return cls(rdclass, rdtype, eui) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py deleted file mode 100644 index 6d5e3d87..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/mxbase.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
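# euibase above enforces the RFC 7043 aa-bb-cc-dd-ee-ff text shape, dash
# positions included, before unhexlifying. A sketch (dnspython >= 2.x
# assumed):
import dns.exception
import dns.rdata

eui = dns.rdata.from_text("IN", "EUI48", "01-23-45-67-89-ab")
print(eui.to_text())      # 01-23-45-67-89-ab
try:
    dns.rdata.from_text("IN", "EUI48", "01:23:45:67:89:ab")
except dns.exception.SyntaxError as exc:
    print(exc)            # Dash expected at position 2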
- -"""MX-like base classes.""" - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class MXBase(dns.rdata.Rdata): - """Base class for rdata that is like an MX record.""" - - __slots__ = ["preference", "exchange"] - - def __init__(self, rdclass, rdtype, preference, exchange): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.exchange = self._as_name(exchange) - - def to_text(self, origin=None, relativize=True, **kw): - exchange = self.exchange.choose_relativity(origin, relativize) - return "%d %s" % (self.preference, exchange) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - exchange = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, exchange) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.exchange.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - exchange = parser.get_name(origin) - return cls(rdclass, rdtype, preference, exchange) - - def _processing_priority(self): - return self.preference - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) - - -@dns.immutable.immutable -class UncompressedMX(MXBase): - """Base class for rdata that is like an MX record, but whose name - is not compressed when converted to DNS wire format, and whose - digestable form is not downcased.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - super()._to_wire(file, None, origin, False) - - -@dns.immutable.immutable -class UncompressedDowncasingMX(MXBase): - """Base class for rdata that is like an MX record, but whose name - is not compressed when convert to DNS wire format.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - super()._to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py deleted file mode 100644 index 904224f0..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/nsbase.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""NS-like base classes.""" - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class NSBase(dns.rdata.Rdata): - """Base class for rdata that is like an NS record.""" - - __slots__ = ["target"] - - def __init__(self, rdclass, rdtype, target): - super().__init__(rdclass, rdtype) - self.target = self._as_name(target) - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return str(target) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - target = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, target) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - target = parser.get_name(origin) - return cls(rdclass, rdtype, target) - - -@dns.immutable.immutable -class UncompressedNS(NSBase): - """Base class for rdata that is like an NS record, but whose name - is not compressed when convert to DNS wire format, and whose - digestable form is not downcased.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, None, origin, False) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py deleted file mode 100644 index a2b15b92..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/svcbbase.py +++ /dev/null @@ -1,585 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import base64 -import enum -import struct - -import dns.enum -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdata -import dns.rdtypes.util -import dns.renderer -import dns.tokenizer -import dns.wire - -# Until there is an RFC, this module is experimental and may be changed in -# incompatible ways. - - -class UnknownParamKey(dns.exception.DNSException): - """Unknown SVCB ParamKey""" - - -class ParamKey(dns.enum.IntEnum): - """SVCB ParamKey""" - - MANDATORY = 0 - ALPN = 1 - NO_DEFAULT_ALPN = 2 - PORT = 3 - IPV4HINT = 4 - ECH = 5 - IPV6HINT = 6 - DOHPATH = 7 - OHTTP = 8 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "SVCBParamKey" - - @classmethod - def _prefix(cls): - return "KEY" - - @classmethod - def _unknown_exception_class(cls): - return UnknownParamKey - - -class Emptiness(enum.IntEnum): - NEVER = 0 - ALWAYS = 1 - ALLOWED = 2 - - -def _validate_key(key): - force_generic = False - if isinstance(key, bytes): - # We decode to latin-1 so we get 0-255 as valid and do NOT interpret - # UTF-8 sequences - key = key.decode("latin-1") - if isinstance(key, str): - if key.lower().startswith("key"): - force_generic = True - if key[3:].startswith("0") and len(key) != 4: - # key has leading zeros - raise ValueError("leading zeros in key") - key = key.replace("-", "_") - return (ParamKey.make(key), force_generic) - - -def key_to_text(key): - return ParamKey.to_text(key).replace("_", "-").lower() - - -# Like rdata escapify, but escapes ',' too. 
- -_escaped = b'",\\' - - -def _escapify(qstring): - text = "" - for c in qstring: - if c in _escaped: - text += "\\" + chr(c) - elif c >= 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - -def _unescape(value): - if value == "": - return value - unescaped = b"" - l = len(value) - i = 0 - while i < l: - c = value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via tokenizer get()) - raise dns.exception.UnexpectedEnd - c = value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - unescaped += b"%c" % (codepoint) - continue - unescaped += c.encode() - return unescaped - - -def _split(value): - l = len(value) - i = 0 - items = [] - unescaped = b"" - while i < l: - c = value[i] - i += 1 - if c == ord("\\"): - if i >= l: # pragma: no cover (can't happen via tokenizer get()) - raise dns.exception.UnexpectedEnd - c = value[i] - i += 1 - unescaped += b"%c" % (c) - elif c == ord(","): - items.append(unescaped) - unescaped = b"" - else: - unescaped += b"%c" % (c) - items.append(unescaped) - return items - - -@dns.immutable.immutable -class Param: - """Abstract base class for SVCB parameters""" - - @classmethod - def emptiness(cls): - return Emptiness.NEVER - - -@dns.immutable.immutable -class GenericParam(Param): - """Generic SVCB parameter""" - - def __init__(self, value): - self.value = dns.rdata.Rdata._as_bytes(value, True) - - @classmethod - def emptiness(cls): - return Emptiness.ALLOWED - - @classmethod - def from_value(cls, value): - if value is None or len(value) == 0: - return None - else: - return cls(_unescape(value)) - - def to_text(self): - return '"' + dns.rdata._escapify(self.value) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - value = parser.get_bytes(parser.remaining()) - if len(value) == 0: - return None - else: - return cls(value) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(self.value) - - -@dns.immutable.immutable -class MandatoryParam(Param): - def __init__(self, keys): - # check for duplicates - keys = sorted([_validate_key(key)[0] for key in keys]) - prior_k = None - for k in keys: - if k == prior_k: - raise ValueError(f"duplicate key {k:d}") - prior_k = k - if k == ParamKey.MANDATORY: - raise ValueError("listed the mandatory key as mandatory") - self.keys = tuple(keys) - - @classmethod - def from_value(cls, value): - keys = [k.encode() for k in value.split(",")] - return cls(keys) - - def to_text(self): - return '"' + ",".join([key_to_text(key) for key in self.keys]) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - keys = [] - last_key = -1 - while parser.remaining() > 0: - key = parser.get_uint16() - if key < last_key: - raise dns.exception.FormError("manadatory keys not ascending") - last_key = key - keys.append(key) - return cls(keys) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for key in self.keys: - file.write(struct.pack("!H", key)) - - -@dns.immutable.immutable -class ALPNParam(Param): - def __init__(self, ids): - self.ids = dns.rdata.Rdata._as_tuple( - ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False) - ) - - @classmethod - def 
from_value(cls, value): - return cls(_split(_unescape(value))) - - def to_text(self): - value = ",".join([_escapify(id) for id in self.ids]) - return '"' + dns.rdata._escapify(value.encode()) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - ids = [] - while parser.remaining() > 0: - id = parser.get_counted_bytes() - ids.append(id) - return cls(ids) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for id in self.ids: - file.write(struct.pack("!B", len(id))) - file.write(id) - - -@dns.immutable.immutable -class NoDefaultALPNParam(Param): - # We don't ever expect to instantiate this class, but we need - # a from_value() and a from_wire_parser(), so we just return None - # from the class methods when things are OK. - - @classmethod - def emptiness(cls): - return Emptiness.ALWAYS - - @classmethod - def from_value(cls, value): - if value is None or value == "": - return None - else: - raise ValueError("no-default-alpn with non-empty value") - - def to_text(self): - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - if parser.remaining() != 0: - raise dns.exception.FormError - return None - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - raise NotImplementedError # pragma: no cover - - -@dns.immutable.immutable -class PortParam(Param): - def __init__(self, port): - self.port = dns.rdata.Rdata._as_uint16(port) - - @classmethod - def from_value(cls, value): - value = int(value) - return cls(value) - - def to_text(self): - return f'"{self.port}"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - port = parser.get_uint16() - return cls(port) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(struct.pack("!H", self.port)) - - -@dns.immutable.immutable -class IPv4HintParam(Param): - def __init__(self, addresses): - self.addresses = dns.rdata.Rdata._as_tuple( - addresses, dns.rdata.Rdata._as_ipv4_address - ) - - @classmethod - def from_value(cls, value): - addresses = value.split(",") - return cls(addresses) - - def to_text(self): - return '"' + ",".join(self.addresses) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - addresses = [] - while parser.remaining() > 0: - ip = parser.get_bytes(4) - addresses.append(dns.ipv4.inet_ntoa(ip)) - return cls(addresses) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for address in self.addresses: - file.write(dns.ipv4.inet_aton(address)) - - -@dns.immutable.immutable -class IPv6HintParam(Param): - def __init__(self, addresses): - self.addresses = dns.rdata.Rdata._as_tuple( - addresses, dns.rdata.Rdata._as_ipv6_address - ) - - @classmethod - def from_value(cls, value): - addresses = value.split(",") - return cls(addresses) - - def to_text(self): - return '"' + ",".join(self.addresses) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - addresses = [] - while parser.remaining() > 0: - ip = parser.get_bytes(16) - addresses.append(dns.ipv6.inet_ntoa(ip)) - return cls(addresses) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for address in self.addresses: - file.write(dns.ipv6.inet_aton(address)) - - -@dns.immutable.immutable -class ECHParam(Param): - def __init__(self, ech): - self.ech = dns.rdata.Rdata._as_bytes(ech, True) - - @classmethod - def from_value(cls, value): - if "\\" in value: - 
raise ValueError("escape in ECH value") - value = base64.b64decode(value.encode()) - return cls(value) - - def to_text(self): - b64 = base64.b64encode(self.ech).decode("ascii") - return f'"{b64}"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - value = parser.get_bytes(parser.remaining()) - return cls(value) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(self.ech) - - -@dns.immutable.immutable -class OHTTPParam(Param): - # We don't ever expect to instantiate this class, but we need - # a from_value() and a from_wire_parser(), so we just return None - # from the class methods when things are OK. - - @classmethod - def emptiness(cls): - return Emptiness.ALWAYS - - @classmethod - def from_value(cls, value): - if value is None or value == "": - return None - else: - raise ValueError("ohttp with non-empty value") - - def to_text(self): - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - if parser.remaining() != 0: - raise dns.exception.FormError - return None - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - raise NotImplementedError # pragma: no cover - - -_class_for_key = { - ParamKey.MANDATORY: MandatoryParam, - ParamKey.ALPN: ALPNParam, - ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, - ParamKey.PORT: PortParam, - ParamKey.IPV4HINT: IPv4HintParam, - ParamKey.ECH: ECHParam, - ParamKey.IPV6HINT: IPv6HintParam, - ParamKey.OHTTP: OHTTPParam, -} - - -def _validate_and_define(params, key, value): - (key, force_generic) = _validate_key(_unescape(key)) - if key in params: - raise SyntaxError(f'duplicate key "{key:d}"') - cls = _class_for_key.get(key, GenericParam) - emptiness = cls.emptiness() - if value is None: - if emptiness == Emptiness.NEVER: - raise SyntaxError("value cannot be empty") - value = cls.from_value(value) - else: - if force_generic: - value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) - else: - value = cls.from_value(value) - params[key] = value - - -@dns.immutable.immutable -class SVCBBase(dns.rdata.Rdata): - """Base class for SVCB-like records""" - - # see: draft-ietf-dnsop-svcb-https-11 - - __slots__ = ["priority", "target", "params"] - - def __init__(self, rdclass, rdtype, priority, target, params): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.target = self._as_name(target) - for k, v in params.items(): - k = ParamKey.make(k) - if not isinstance(v, Param) and v is not None: - raise ValueError(f"{k:d} not a Param") - self.params = dns.immutable.Dict(params) - # Make sure any parameter listed as mandatory is present in the - # record. - mandatory = params.get(ParamKey.MANDATORY) - if mandatory: - for key in mandatory.keys: - # Note we have to say "not in" as we have None as a value - # so a get() and a not None test would be wrong. - if key not in params: - raise ValueError(f"key {key:d} declared mandatory but not present") - # The no-default-alpn parameter requires the alpn parameter. 
- if ParamKey.NO_DEFAULT_ALPN in params: - if ParamKey.ALPN not in params: - raise ValueError("no-default-alpn present, but alpn missing") - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - params = [] - for key in sorted(self.params.keys()): - value = self.params[key] - if value is None: - params.append(key_to_text(key)) - else: - kv = key_to_text(key) + "=" + value.to_text() - params.append(kv) - if len(params) > 0: - space = " " - else: - space = "" - return "%d %s%s%s" % (self.priority, target, space, " ".join(params)) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - target = tok.get_name(origin, relativize, relativize_to) - if priority == 0: - token = tok.get() - if not token.is_eol_or_eof(): - raise SyntaxError("parameters in AliasMode") - tok.unget(token) - params = {} - while True: - token = tok.get() - if token.is_eol_or_eof(): - tok.unget(token) - break - if token.ttype != dns.tokenizer.IDENTIFIER: - raise SyntaxError("parameter is not an identifier") - equals = token.value.find("=") - if equals == len(token.value) - 1: - # 'key=', so next token should be a quoted string without - # any intervening whitespace. - key = token.value[:-1] - token = tok.get(want_leading=True) - if token.ttype != dns.tokenizer.QUOTED_STRING: - raise SyntaxError("whitespace after =") - value = token.value - elif equals > 0: - # key=value - key = token.value[:equals] - value = token.value[equals + 1 :] - elif equals == 0: - # =key - raise SyntaxError('parameter cannot start with "="') - else: - # key - key = token.value - value = None - _validate_and_define(params, key, value) - return cls(rdclass, rdtype, priority, target, params) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.priority)) - self.target.to_wire(file, None, origin, False) - for key in sorted(self.params): - file.write(struct.pack("!H", key)) - value = self.params[key] - with dns.renderer.prefixed_length(file, 2): - # Note that we're still writing a length of zero if the value is None - if value is not None: - value.to_wire(file, origin) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - priority = parser.get_uint16() - target = parser.get_name(origin) - if priority == 0 and parser.remaining() != 0: - raise dns.exception.FormError("parameters in AliasMode") - params = {} - prior_key = -1 - while parser.remaining() > 0: - key = parser.get_uint16() - if key < prior_key: - raise dns.exception.FormError("keys not in order") - prior_key = key - vlen = parser.get_uint16() - pcls = _class_for_key.get(key, GenericParam) - with parser.restrict_to(vlen): - value = pcls.from_wire_parser(parser, origin) - params[key] = value - return cls(rdclass, rdtype, priority, target, params) - - def _processing_priority(self): - return self.priority - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py deleted file mode 100644 index a059d2c4..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/tlsabase.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class TLSABase(dns.rdata.Rdata): - """Base class for TLSA and SMIMEA records""" - - # see: RFC 6698 - - __slots__ = ["usage", "selector", "mtype", "cert"] - - def __init__(self, rdclass, rdtype, usage, selector, mtype, cert): - super().__init__(rdclass, rdtype) - self.usage = self._as_uint8(usage) - self.selector = self._as_uint8(selector) - self.mtype = self._as_uint8(mtype) - self.cert = self._as_bytes(cert) - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.usage, - self.selector, - self.mtype, - dns.rdata._hexify(self.cert, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - usage = tok.get_uint8() - selector = tok.get_uint8() - mtype = tok.get_uint8() - cert = tok.concatenate_remaining_identifiers().encode() - cert = binascii.unhexlify(cert) - return cls(rdclass, rdtype, usage, selector, mtype, cert) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BBB", self.usage, self.selector, self.mtype) - file.write(header) - file.write(self.cert) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("BBB") - cert = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py b/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py deleted file mode 100644 index 73db6d9e..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/txtbase.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
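The TLSABase class deleted just above is the shared implementation behind the TLSA and SMIMEA rdtypes. For orientation, a hedged sketch of the equivalent public-API round-trip (assumes dnspython 2.x; the hex digest is a placeholder, not a real certificate hash):

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    # "3 1 1" = DANE-EE usage, SPKI selector, SHA-256 matching type (RFC 6698).
    rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.TLSA, "3 1 1 " + "ab" * 32)
    print(rd.usage, rd.selector, rd.mtype)  # 3 1 1
    print(len(rd.cert))                     # 32 bytes of association data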
- -"""TXT-like base class.""" - -from typing import Any, Dict, Iterable, Optional, Tuple, Union - -import dns.exception -import dns.immutable -import dns.rdata -import dns.renderer -import dns.tokenizer - - -@dns.immutable.immutable -class TXTBase(dns.rdata.Rdata): - """Base class for rdata that is like a TXT record (see RFC 1035).""" - - __slots__ = ["strings"] - - def __init__( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - strings: Iterable[Union[bytes, str]], - ): - """Initialize a TXT-like rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - - *strings*, a tuple of ``bytes`` - """ - super().__init__(rdclass, rdtype) - self.strings: Tuple[bytes] = self._as_tuple( - strings, lambda x: self._as_bytes(x, True, 255) - ) - if len(self.strings) == 0: - raise ValueError("the list of strings must not be empty") - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any], - ) -> str: - txt = "" - prefix = "" - for s in self.strings: - txt += f'{prefix}"{dns.rdata._escapify(s)}"' - prefix = " " - return txt - - @classmethod - def from_text( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - tok: dns.tokenizer.Tokenizer, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.rdata.Rdata: - strings = [] - for token in tok.get_remaining(): - token = token.unescape_to_bytes() - # The 'if' below is always true in the current code, but we - # are leaving this check in in case things change some day. - if not ( - token.is_quoted_string() or token.is_identifier() - ): # pragma: no cover - raise dns.exception.SyntaxError("expected a string") - if len(token.value) > 255: - raise dns.exception.SyntaxError("string too long") - strings.append(token.value) - if len(strings) == 0: - raise dns.exception.UnexpectedEnd - return cls(rdclass, rdtype, strings) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for s in self.strings: - with dns.renderer.prefixed_length(file, 1): - file.write(s) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - strings = [] - while parser.remaining() > 0: - s = parser.get_counted_bytes() - strings.append(s) - return cls(rdclass, rdtype, strings) diff --git a/venv/lib/python3.12/site-packages/dns/rdtypes/util.py b/venv/lib/python3.12/site-packages/dns/rdtypes/util.py deleted file mode 100644 index 653a0bf2..00000000 --- a/venv/lib/python3.12/site-packages/dns/rdtypes/util.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import collections -import random -import struct -from typing import Any, List - -import dns.exception -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdata - - -class Gateway: - """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" - - name = "" - - def __init__(self, type, gateway=None): - self.type = dns.rdata.Rdata._as_uint8(type) - self.gateway = gateway - self._check() - - @classmethod - def _invalid_type(cls, gateway_type): - return f"invalid {cls.name} type: {gateway_type}" - - def _check(self): - if self.type == 0: - if self.gateway not in (".", None): - raise SyntaxError(f"invalid {self.name} for type 0") - self.gateway = None - elif self.type == 1: - # check that it's OK - dns.ipv4.inet_aton(self.gateway) - elif self.type == 2: - # check that it's OK - dns.ipv6.inet_aton(self.gateway) - elif self.type == 3: - if not isinstance(self.gateway, dns.name.Name): - raise SyntaxError(f"invalid {self.name}; not a name") - else: - raise SyntaxError(self._invalid_type(self.type)) - - def to_text(self, origin=None, relativize=True): - if self.type == 0: - return "." - elif self.type in (1, 2): - return self.gateway - elif self.type == 3: - return str(self.gateway.choose_relativity(origin, relativize)) - else: - raise ValueError(self._invalid_type(self.type)) # pragma: no cover - - @classmethod - def from_text( - cls, gateway_type, tok, origin=None, relativize=True, relativize_to=None - ): - if gateway_type in (0, 1, 2): - gateway = tok.get_string() - elif gateway_type == 3: - gateway = tok.get_name(origin, relativize, relativize_to) - else: - raise dns.exception.SyntaxError( - cls._invalid_type(gateway_type) - ) # pragma: no cover - return cls(gateway_type, gateway) - - # pylint: disable=unused-argument - def to_wire(self, file, compress=None, origin=None, canonicalize=False): - if self.type == 0: - pass - elif self.type == 1: - file.write(dns.ipv4.inet_aton(self.gateway)) - elif self.type == 2: - file.write(dns.ipv6.inet_aton(self.gateway)) - elif self.type == 3: - self.gateway.to_wire(file, None, origin, False) - else: - raise ValueError(self._invalid_type(self.type)) # pragma: no cover - - # pylint: enable=unused-argument - - @classmethod - def from_wire_parser(cls, gateway_type, parser, origin=None): - if gateway_type == 0: - gateway = None - elif gateway_type == 1: - gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) - elif gateway_type == 2: - gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) - elif gateway_type == 3: - gateway = parser.get_name(origin) - else: - raise dns.exception.FormError(cls._invalid_type(gateway_type)) - return cls(gateway_type, gateway) - - -class Bitmap: - """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" - - type_name = "" - - def __init__(self, windows=None): - last_window = -1 - self.windows = windows - for window, bitmap in self.windows: - if not isinstance(window, int): - raise ValueError(f"bad {self.type_name} window type") - if window <= last_window: - raise ValueError(f"bad {self.type_name} window order") - if window > 256: - raise ValueError(f"bad {self.type_name} window number") - last_window = window - if not isinstance(bitmap, bytes): - raise ValueError(f"bad {self.type_name} octets 
type") - if len(bitmap) == 0 or len(bitmap) > 32: - raise ValueError(f"bad {self.type_name} octets") - - def to_text(self) -> str: - text = "" - for window, bitmap in self.windows: - bits = [] - for i, byte in enumerate(bitmap): - for j in range(0, 8): - if byte & (0x80 >> j): - rdtype = window * 256 + i * 8 + j - bits.append(dns.rdatatype.to_text(rdtype)) - text += " " + " ".join(bits) - return text - - @classmethod - def from_text(cls, tok: "dns.tokenizer.Tokenizer") -> "Bitmap": - rdtypes = [] - for token in tok.get_remaining(): - rdtype = dns.rdatatype.from_text(token.unescape().value) - if rdtype == 0: - raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") - rdtypes.append(rdtype) - return cls.from_rdtypes(rdtypes) - - @classmethod - def from_rdtypes(cls, rdtypes: List[dns.rdatatype.RdataType]) -> "Bitmap": - rdtypes = sorted(rdtypes) - window = 0 - octets = 0 - prior_rdtype = 0 - bitmap = bytearray(b"\0" * 32) - windows = [] - for rdtype in rdtypes: - if rdtype == prior_rdtype: - continue - prior_rdtype = rdtype - new_window = rdtype // 256 - if new_window != window: - if octets != 0: - windows.append((window, bytes(bitmap[0:octets]))) - bitmap = bytearray(b"\0" * 32) - window = new_window - offset = rdtype % 256 - byte = offset // 8 - bit = offset % 8 - octets = byte + 1 - bitmap[byte] = bitmap[byte] | (0x80 >> bit) - if octets != 0: - windows.append((window, bytes(bitmap[0:octets]))) - return cls(windows) - - def to_wire(self, file: Any) -> None: - for window, bitmap in self.windows: - file.write(struct.pack("!BB", window, len(bitmap))) - file.write(bitmap) - - @classmethod - def from_wire_parser(cls, parser: "dns.wire.Parser") -> "Bitmap": - windows = [] - while parser.remaining() > 0: - window = parser.get_uint8() - bitmap = parser.get_counted_bytes() - windows.append((window, bitmap)) - return cls(windows) - - -def _priority_table(items): - by_priority = collections.defaultdict(list) - for rdata in items: - by_priority[rdata._processing_priority()].append(rdata) - return by_priority - - -def priority_processing_order(iterable): - items = list(iterable) - if len(items) == 1: - return items - by_priority = _priority_table(items) - ordered = [] - for k in sorted(by_priority.keys()): - rdatas = by_priority[k] - random.shuffle(rdatas) - ordered.extend(rdatas) - return ordered - - -_no_weight = 0.1 - - -def weighted_processing_order(iterable): - items = list(iterable) - if len(items) == 1: - return items - by_priority = _priority_table(items) - ordered = [] - for k in sorted(by_priority.keys()): - rdatas = by_priority[k] - total = sum(rdata._processing_weight() or _no_weight for rdata in rdatas) - while len(rdatas) > 1: - r = random.uniform(0, total) - for n, rdata in enumerate(rdatas): # noqa: B007 - weight = rdata._processing_weight() or _no_weight - if weight > r: - break - r -= weight - total -= weight - ordered.append(rdata) # pylint: disable=undefined-loop-variable - del rdatas[n] # pylint: disable=undefined-loop-variable - ordered.append(rdatas[0]) - return ordered - - -def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): - if len(formatted) != num_chunks * (chunk_size + 1) - 1: - raise ValueError("invalid formatted hex string") - value = b"" - for _ in range(num_chunks): - chunk = formatted[0:chunk_size] - value += int(chunk, 16).to_bytes(chunk_size // 2, "big") - formatted = formatted[chunk_size:] - if len(formatted) > 0 and formatted[0] != separator: - raise ValueError("invalid formatted hex string") - formatted = formatted[1:] - return value diff 
--git a/venv/lib/python3.12/site-packages/dns/renderer.py b/venv/lib/python3.12/site-packages/dns/renderer.py deleted file mode 100644 index a77481f6..00000000 --- a/venv/lib/python3.12/site-packages/dns/renderer.py +++ /dev/null @@ -1,346 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Help for building DNS wire format messages""" - -import contextlib -import io -import random -import struct -import time - -import dns.exception -import dns.tsig - -QUESTION = 0 -ANSWER = 1 -AUTHORITY = 2 -ADDITIONAL = 3 - - -@contextlib.contextmanager -def prefixed_length(output, length_length): - output.write(b"\00" * length_length) - start = output.tell() - yield - end = output.tell() - length = end - start - if length > 0: - try: - output.seek(start - length_length) - try: - output.write(length.to_bytes(length_length, "big")) - except OverflowError: - raise dns.exception.FormError - finally: - output.seek(end) - - -class Renderer: - """Helper class for building DNS wire-format messages. - - Most applications can use the higher-level L{dns.message.Message} - class and its to_wire() method to generate wire-format messages. - This class is for those applications which need finer control - over the generation of messages. - - Typical use:: - - r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) - r.add_question(qname, qtype, qclass) - r.add_rrset(dns.renderer.ANSWER, rrset_1) - r.add_rrset(dns.renderer.ANSWER, rrset_2) - r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) - r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) - r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) - r.add_edns(0, 0, 4096) - r.write_header() - r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) - wire = r.get_wire() - - If padding is going to be used, then the OPT record MUST be - written after everything else in the additional section except for - the TSIG (if any). 
- - output, an io.BytesIO, where rendering is written - - id: the message id - - flags: the message flags - - max_size: the maximum size of the message - - origin: the origin to use when rendering relative names - - compress: the compression table - - section: an int, the section currently being rendered - - counts: list of the number of RRs in each section - - mac: the MAC of the rendered message (if TSIG was used) - """ - - def __init__(self, id=None, flags=0, max_size=65535, origin=None): - """Initialize a new renderer.""" - - self.output = io.BytesIO() - if id is None: - self.id = random.randint(0, 65535) - else: - self.id = id - self.flags = flags - self.max_size = max_size - self.origin = origin - self.compress = {} - self.section = QUESTION - self.counts = [0, 0, 0, 0] - self.output.write(b"\x00" * 12) - self.mac = "" - self.reserved = 0 - self.was_padded = False - - def _rollback(self, where): - """Truncate the output buffer at offset *where*, and remove any - compression table entries that pointed beyond the truncation - point. - """ - - self.output.seek(where) - self.output.truncate() - keys_to_delete = [] - for k, v in self.compress.items(): - if v >= where: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.compress[k] - - def _set_section(self, section): - """Set the renderer's current section. - - Sections must be rendered in order: QUESTION, ANSWER, AUTHORITY, - ADDITIONAL. Sections may be empty. - - Raises dns.exception.FormError if an attempt was made to set - a section value less than the current section. - """ - - if self.section != section: - if self.section > section: - raise dns.exception.FormError - self.section = section - - @contextlib.contextmanager - def _track_size(self): - start = self.output.tell() - yield start - if self.output.tell() > self.max_size: - self._rollback(start) - raise dns.exception.TooBig - - @contextlib.contextmanager - def _temporarily_seek_to(self, where): - current = self.output.tell() - try: - self.output.seek(where) - yield - finally: - self.output.seek(current) - - def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): - """Add a question to the message.""" - - self._set_section(QUESTION) - with self._track_size(): - qname.to_wire(self.output, self.compress, self.origin) - self.output.write(struct.pack("!HH", rdtype, rdclass)) - self.counts[QUESTION] += 1 - - def add_rrset(self, section, rrset, **kw): - """Add the rrset to the specified section. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. - """ - - self._set_section(section) - with self._track_size(): - n = rrset.to_wire(self.output, self.compress, self.origin, **kw) - self.counts[section] += n - - def add_rdataset(self, section, name, rdataset, **kw): - """Add the rdataset to the specified section, using the specified - name as the owner name. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. - """ - - self._set_section(section) - with self._track_size(): - n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw) - self.counts[section] += n - - def add_opt(self, opt, pad=0, opt_size=0, tsig_size=0): - """Add *opt* to the additional section, applying padding if desired. The - padding will take the specified precomputed OPT size and TSIG size into - account.
- - Note that we don't have a reliable way of knowing how big a GSS-TSIG digest - might be, so we might not get an even multiple of the pad in that case.""" - if pad: - ttl = opt.ttl - assert opt_size >= 11 - opt_rdata = opt[0] - size_without_padding = self.output.tell() + opt_size + tsig_size - remainder = size_without_padding % pad - if remainder: - pad = b"\x00" * (pad - remainder) - else: - pad = b"" - options = list(opt_rdata.options) - options.append(dns.edns.GenericOption(dns.edns.OptionType.PADDING, pad)) - opt = dns.message.Message._make_opt(ttl, opt_rdata.rdclass, options) - self.was_padded = True - self.add_rrset(ADDITIONAL, opt) - - def add_edns(self, edns, ednsflags, payload, options=None): - """Add an EDNS OPT record to the message.""" - - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= 0xFF00FFFF - ednsflags |= edns << 16 - opt = dns.message.Message._make_opt(ednsflags, payload, options) - self.add_opt(opt) - - def add_tsig( - self, - keyname, - secret, - fudge, - id, - tsig_error, - other_data, - request_mac, - algorithm=dns.tsig.default_algorithm, - ): - """Add a TSIG signature to the message.""" - - s = self.output.getvalue() - - if isinstance(secret, dns.tsig.Key): - key = secret - else: - key = dns.tsig.Key(keyname, secret, algorithm) - tsig = dns.message.Message._make_tsig( - keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data - ) - (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), request_mac) - self._write_tsig(tsig, keyname) - - def add_multi_tsig( - self, - ctx, - keyname, - secret, - fudge, - id, - tsig_error, - other_data, - request_mac, - algorithm=dns.tsig.default_algorithm, - ): - """Add a TSIG signature to the message. Unlike add_tsig(), this can be - used for a series of consecutive DNS envelopes, e.g. for a zone - transfer over TCP [RFC2845, 4.4]. - - For the first message in the sequence, give ctx=None. For each - subsequent message, give the ctx that was returned from the - add_multi_tsig() call for the previous message.""" - - s = self.output.getvalue() - - if isinstance(secret, dns.tsig.Key): - key = secret - else: - key = dns.tsig.Key(keyname, secret, algorithm) - tsig = dns.message.Message._make_tsig( - keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data - ) - (tsig, ctx) = dns.tsig.sign( - s, key, tsig[0], int(time.time()), request_mac, ctx, True - ) - self._write_tsig(tsig, keyname) - return ctx - - def _write_tsig(self, tsig, keyname): - if self.was_padded: - compress = None - else: - compress = self.compress - self._set_section(ADDITIONAL) - with self._track_size(): - keyname.to_wire(self.output, compress, self.origin) - self.output.write( - struct.pack("!HHI", dns.rdatatype.TSIG, dns.rdataclass.ANY, 0) - ) - with prefixed_length(self.output, 2): - tsig.to_wire(self.output) - - self.counts[ADDITIONAL] += 1 - with self._temporarily_seek_to(10): - self.output.write(struct.pack("!H", self.counts[ADDITIONAL])) - - def write_header(self): - """Write the DNS message header. - - Writing the DNS message header is done after all sections - have been rendered, but before the optional TSIG signature - is added.
- """ - - with self._temporarily_seek_to(0): - self.output.write( - struct.pack( - "!HHHHHH", - self.id, - self.flags, - self.counts[0], - self.counts[1], - self.counts[2], - self.counts[3], - ) - ) - - def get_wire(self): - """Return the wire format message.""" - - return self.output.getvalue() - - def reserve(self, size: int) -> None: - """Reserve *size* bytes.""" - if size < 0: - raise ValueError("reserved amount must be non-negative") - if size > self.max_size: - raise ValueError("cannot reserve more than the maximum size") - self.reserved += size - self.max_size -= size - - def release_reserved(self) -> None: - """Release the reserved bytes.""" - self.max_size += self.reserved - self.reserved = 0 diff --git a/venv/lib/python3.12/site-packages/dns/resolver.py b/venv/lib/python3.12/site-packages/dns/resolver.py deleted file mode 100644 index 3ba76e31..00000000 --- a/venv/lib/python3.12/site-packages/dns/resolver.py +++ /dev/null @@ -1,2053 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS stub resolver.""" - -import contextlib -import random -import socket -import sys -import threading -import time -import warnings -from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union -from urllib.parse import urlparse - -import dns._ddr -import dns.edns -import dns.exception -import dns.flags -import dns.inet -import dns.ipv4 -import dns.ipv6 -import dns.message -import dns.name -import dns.rdata -import dns.nameserver -import dns.query -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.svcbbase -import dns.reversename -import dns.tsig - -if sys.platform == "win32": # pragma: no cover - import dns.win32util - - -class NXDOMAIN(dns.exception.DNSException): - """The DNS query name does not exist.""" - - supp_kwargs = {"qnames", "responses"} - fmt = None # we have our own __str__ implementation - - # pylint: disable=arguments-differ - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _check_kwargs(self, qnames, responses=None): - if not isinstance(qnames, (list, tuple, set)): - raise AttributeError("qnames must be a list, tuple or set") - if len(qnames) == 0: - raise AttributeError("qnames must contain at least one element") - if responses is None: - responses = {} - elif not isinstance(responses, dict): - raise AttributeError("responses must be a dict(qname=response)") - kwargs = dict(qnames=qnames, responses=responses) - return kwargs - - def __str__(self) -> str: - if "qnames" not in self.kwargs: - return super().__str__() - qnames = self.kwargs["qnames"] - if len(qnames) > 1: - msg = "None of DNS query names exist" - else: - msg = "The DNS query name does not exist" - qnames = ", ".join(map(str, qnames)) - return f"{msg}: {qnames}" - - @property - def canonical_name(self): - """Return the unresolved canonical name.""" - if "qnames" not in self.kwargs: - raise TypeError("parametrized exception required") - for qname in self.kwargs["qnames"]: - response = self.kwargs["responses"][qname] - try: - cname = response.canonical_name() - if cname != qname: - return cname - except Exception: # pragma: no cover - # We can just eat this exception as it means there was - # something wrong with the response. - pass - return self.kwargs["qnames"][0] - - def __add__(self, e_nx): - """Augment by results from another NXDOMAIN exception.""" - qnames0 = list(self.kwargs.get("qnames", [])) - responses0 = dict(self.kwargs.get("responses", {})) - responses1 = e_nx.kwargs.get("responses", {}) - for qname1 in e_nx.kwargs.get("qnames", []): - if qname1 not in qnames0: - qnames0.append(qname1) - if qname1 in responses1: - responses0[qname1] = responses1[qname1] - return NXDOMAIN(qnames=qnames0, responses=responses0) - - def qnames(self): - """All of the names that were tried. - - Returns a list of ``dns.name.Name``. - """ - return self.kwargs["qnames"] - - def responses(self): - """A map from queried names to their NXDOMAIN responses. - - Returns a dict mapping a ``dns.name.Name`` to a - ``dns.message.Message``. - """ - return self.kwargs["responses"] - - def response(self, qname): - """The response for query *qname*. - - Returns a ``dns.message.Message``. 
- """ - return self.kwargs["responses"][qname] - - -class YXDOMAIN(dns.exception.DNSException): - """The DNS query name is too long after DNAME substitution.""" - - -ErrorTuple = Tuple[ - Optional[str], - bool, - int, - Union[Exception, str], - Optional[dns.message.Message], -] - - -def _errors_to_text(errors: List[ErrorTuple]) -> List[str]: - """Turn a resolution errors trace into a list of text.""" - texts = [] - for err in errors: - texts.append(f"Server {err[0]} answered {err[3]}") - return texts - - -class LifetimeTimeout(dns.exception.Timeout): - """The resolution lifetime expired.""" - - msg = "The resolution lifetime expired." - fmt = f"{msg[:-1]} after {{timeout:.3f}} seconds: {{errors}}" - supp_kwargs = {"timeout", "errors"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - srv_msgs = _errors_to_text(kwargs["errors"]) - return super()._fmt_kwargs( - timeout=kwargs["timeout"], errors="; ".join(srv_msgs) - ) - - -# We added more detail to resolution timeouts, but they are still -# subclasses of dns.exception.Timeout for backwards compatibility. We also -# keep dns.resolver.Timeout defined for backwards compatibility. -Timeout = LifetimeTimeout - - -class NoAnswer(dns.exception.DNSException): - """The DNS response does not contain an answer to the question.""" - - fmt = "The DNS response does not contain an answer to the question: {query}" - supp_kwargs = {"response"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - return super()._fmt_kwargs(query=kwargs["response"].question) - - def response(self): - return self.kwargs["response"] - - -class NoNameservers(dns.exception.DNSException): - """All nameservers failed to answer the query. - - errors: list of servers and respective errors - The type of errors is - [(server IP address, any object convertible to string)]. - Non-empty errors list will add explanatory message () - """ - - msg = "All nameservers failed to answer the query." - fmt = f"{msg[:-1]} {{query}}: {{errors}}" - supp_kwargs = {"request", "errors"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - srv_msgs = _errors_to_text(kwargs["errors"]) - return super()._fmt_kwargs( - query=kwargs["request"].question, errors="; ".join(srv_msgs) - ) - - -class NotAbsolute(dns.exception.DNSException): - """An absolute domain name is required but a relative name was provided.""" - - -class NoRootSOA(dns.exception.DNSException): - """There is no SOA RR at the DNS root name. This should never happen!""" - - -class NoMetaqueries(dns.exception.DNSException): - """DNS metaqueries are not allowed.""" - - -class NoResolverConfiguration(dns.exception.DNSException): - """Resolver configuration could not be read or specified no nameservers.""" - - -class Answer: - """DNS stub resolver answer. - - Instances of this class bundle up the result of a successful DNS - resolution. - - For convenience, the answer object implements much of the sequence - protocol, forwarding to its ``rrset`` attribute. E.g. - ``for a in answer`` is equivalent to ``for a in answer.rrset``. 
- ``answer[i]`` is equivalent to ``answer.rrset[i]``, and - ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. - - Note that CNAMEs or DNAMEs in the response may mean that answer - RRset's name might not be the query name. - """ - - def __init__( - self, - qname: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - rdclass: dns.rdataclass.RdataClass, - response: dns.message.QueryMessage, - nameserver: Optional[str] = None, - port: Optional[int] = None, - ) -> None: - self.qname = qname - self.rdtype = rdtype - self.rdclass = rdclass - self.response = response - self.nameserver = nameserver - self.port = port - self.chaining_result = response.resolve_chaining() - # Copy some attributes out of chaining_result for backwards - # compatibility and convenience. - self.canonical_name = self.chaining_result.canonical_name - self.rrset = self.chaining_result.answer - self.expiration = time.time() + self.chaining_result.minimum_ttl - - def __getattr__(self, attr): # pragma: no cover - if attr == "name": - return self.rrset.name - elif attr == "ttl": - return self.rrset.ttl - elif attr == "covers": - return self.rrset.covers - elif attr == "rdclass": - return self.rrset.rdclass - elif attr == "rdtype": - return self.rrset.rdtype - else: - raise AttributeError(attr) - - def __len__(self) -> int: - return self.rrset and len(self.rrset) or 0 - - def __iter__(self) -> Iterator[dns.rdata.Rdata]: - return self.rrset and iter(self.rrset) or iter(tuple()) - - def __getitem__(self, i): - if self.rrset is None: - raise IndexError - return self.rrset[i] - - def __delitem__(self, i): - if self.rrset is None: - raise IndexError - del self.rrset[i] - - -class Answers(dict): - """A dict of DNS stub resolver answers, indexed by type.""" - - -class HostAnswers(Answers): - """A dict of DNS stub resolver answers to a host name lookup, indexed by - type. - """ - - @classmethod - def make( - cls, - v6: Optional[Answer] = None, - v4: Optional[Answer] = None, - add_empty: bool = True, - ) -> "HostAnswers": - answers = HostAnswers() - if v6 is not None and (add_empty or v6.rrset): - answers[dns.rdatatype.AAAA] = v6 - if v4 is not None and (add_empty or v4.rrset): - answers[dns.rdatatype.A] = v4 - return answers - - # Returns pairs of (address, family) from this result, potentially - # filtering by address family. - def addresses_and_families( - self, family: int = socket.AF_UNSPEC - ) -> Iterator[Tuple[str, int]]: - if family == socket.AF_UNSPEC: - yield from self.addresses_and_families(socket.AF_INET6) - yield from self.addresses_and_families(socket.AF_INET) - return - elif family == socket.AF_INET6: - answer = self.get(dns.rdatatype.AAAA) - elif family == socket.AF_INET: - answer = self.get(dns.rdatatype.A) - else: # pragma: no cover - raise NotImplementedError(f"unknown address family {family}") - if answer: - for rdata in answer: - yield (rdata.address, family) - - # Returns addresses from this result, potentially filtering by - # address family. - def addresses(self, family: int = socket.AF_UNSPEC) -> Iterator[str]: - return (pair[0] for pair in self.addresses_and_families(family)) - - # Returns the canonical name from this result. 
- def canonical_name(self) -> dns.name.Name: - answer = self.get(dns.rdatatype.AAAA, self.get(dns.rdatatype.A)) - return answer.canonical_name - - -class CacheStatistics: - """Cache Statistics""" - - def __init__(self, hits: int = 0, misses: int = 0) -> None: - self.hits = hits - self.misses = misses - - def reset(self) -> None: - self.hits = 0 - self.misses = 0 - - def clone(self) -> "CacheStatistics": - return CacheStatistics(self.hits, self.misses) - - -class CacheBase: - def __init__(self) -> None: - self.lock = threading.Lock() - self.statistics = CacheStatistics() - - def reset_statistics(self) -> None: - """Reset all statistics to zero.""" - with self.lock: - self.statistics.reset() - - def hits(self) -> int: - """How many hits has the cache had?""" - with self.lock: - return self.statistics.hits - - def misses(self) -> int: - """How many misses has the cache had?""" - with self.lock: - return self.statistics.misses - - def get_statistics_snapshot(self) -> CacheStatistics: - """Return a consistent snapshot of all the statistics. - - If running with multiple threads, it's better to take a - snapshot than to call statistics methods such as hits() and - misses() individually. - """ - with self.lock: - return self.statistics.clone() - - -CacheKey = Tuple[dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass] - - -class Cache(CacheBase): - """Simple thread-safe DNS answer cache.""" - - def __init__(self, cleaning_interval: float = 300.0) -> None: - """*cleaning_interval*, a ``float`` is the number of seconds between - periodic cleanings. - """ - - super().__init__() - self.data: Dict[CacheKey, Answer] = {} - self.cleaning_interval = cleaning_interval - self.next_cleaning: float = time.time() + self.cleaning_interval - - def _maybe_clean(self) -> None: - """Clean the cache if it's time to do so.""" - - now = time.time() - if self.next_cleaning <= now: - keys_to_delete = [] - for k, v in self.data.items(): - if v.expiration <= now: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.data[k] - now = time.time() - self.next_cleaning = now + self.cleaning_interval - - def get(self, key: CacheKey) -> Optional[Answer]: - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - with self.lock: - self._maybe_clean() - v = self.data.get(key) - if v is None or v.expiration <= time.time(): - self.statistics.misses += 1 - return None - self.statistics.hits += 1 - return v - - def put(self, key: CacheKey, value: Answer) -> None: - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. - """ - - with self.lock: - self._maybe_clean() - self.data[key] = value - - def flush(self, key: Optional[CacheKey] = None) -> None: - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise the entire cache - is flushed. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. 
- """ - - with self.lock: - if key is not None: - if key in self.data: - del self.data[key] - else: - self.data = {} - self.next_cleaning = time.time() + self.cleaning_interval - - -class LRUCacheNode: - """LRUCache node.""" - - def __init__(self, key, value): - self.key = key - self.value = value - self.hits = 0 - self.prev = self - self.next = self - - def link_after(self, node: "LRUCacheNode") -> None: - self.prev = node - self.next = node.next - node.next.prev = self - node.next = self - - def unlink(self) -> None: - self.next.prev = self.prev - self.prev.next = self.next - - -class LRUCache(CacheBase): - """Thread-safe, bounded, least-recently-used DNS answer cache. - - This cache is better than the simple cache (above) if you're - running a web crawler or other process that does a lot of - resolutions. The LRUCache has a maximum number of nodes, and when - it is full, the least-recently used node is removed to make space - for a new one. - """ - - def __init__(self, max_size: int = 100000) -> None: - """*max_size*, an ``int``, is the maximum number of nodes to cache; - it must be greater than 0. - """ - - super().__init__() - self.data: Dict[CacheKey, LRUCacheNode] = {} - self.set_max_size(max_size) - self.sentinel: LRUCacheNode = LRUCacheNode(None, None) - self.sentinel.prev = self.sentinel - self.sentinel.next = self.sentinel - - def set_max_size(self, max_size: int) -> None: - if max_size < 1: - max_size = 1 - self.max_size = max_size - - def get(self, key: CacheKey) -> Optional[Answer]: - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - with self.lock: - node = self.data.get(key) - if node is None: - self.statistics.misses += 1 - return None - # Unlink because we're either going to move the node to the front - # of the LRU list or we're going to free it. - node.unlink() - if node.value.expiration <= time.time(): - del self.data[node.key] - self.statistics.misses += 1 - return None - node.link_after(self.sentinel) - self.statistics.hits += 1 - node.hits += 1 - return node.value - - def get_hits_for_key(self, key: CacheKey) -> int: - """Return the number of cache hits associated with the specified key.""" - with self.lock: - node = self.data.get(key) - if node is None or node.value.expiration <= time.time(): - return 0 - else: - return node.hits - - def put(self, key: CacheKey, value: Answer) -> None: - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. - """ - - with self.lock: - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - while len(self.data) >= self.max_size: - gnode = self.sentinel.prev - gnode.unlink() - del self.data[gnode.key] - node = LRUCacheNode(key, value) - node.link_after(self.sentinel) - self.data[key] = node - - def flush(self, key: Optional[CacheKey] = None) -> None: - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise the entire cache - is flushed. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. 
- """ - - with self.lock: - if key is not None: - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - else: - gnode = self.sentinel.next - while gnode != self.sentinel: - next = gnode.next - gnode.unlink() - gnode = next - self.data = {} - - -class _Resolution: - """Helper class for dns.resolver.Resolver.resolve(). - - All of the "business logic" of resolution is encapsulated in this - class, allowing us to have multiple resolve() implementations - using different I/O schemes without copying all of the - complicated logic. - - This class is a "friend" to dns.resolver.Resolver and manipulates - resolver data structures directly. - """ - - def __init__( - self, - resolver: "BaseResolver", - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - rdclass: Union[dns.rdataclass.RdataClass, str], - tcp: bool, - raise_on_no_answer: bool, - search: Optional[bool], - ) -> None: - if isinstance(qname, str): - qname = dns.name.from_text(qname, None) - rdtype = dns.rdatatype.RdataType.make(rdtype) - if dns.rdatatype.is_metatype(rdtype): - raise NoMetaqueries - rdclass = dns.rdataclass.RdataClass.make(rdclass) - if dns.rdataclass.is_metaclass(rdclass): - raise NoMetaqueries - self.resolver = resolver - self.qnames_to_try = resolver._get_qnames_to_try(qname, search) - self.qnames = self.qnames_to_try[:] - self.rdtype = rdtype - self.rdclass = rdclass - self.tcp = tcp - self.raise_on_no_answer = raise_on_no_answer - self.nxdomain_responses: Dict[dns.name.Name, dns.message.QueryMessage] = {} - # Initialize other things to help analysis tools - self.qname = dns.name.empty - self.nameservers: List[dns.nameserver.Nameserver] = [] - self.current_nameservers: List[dns.nameserver.Nameserver] = [] - self.errors: List[ErrorTuple] = [] - self.nameserver: Optional[dns.nameserver.Nameserver] = None - self.tcp_attempt = False - self.retry_with_tcp = False - self.request: Optional[dns.message.QueryMessage] = None - self.backoff = 0.0 - - def next_request( - self, - ) -> Tuple[Optional[dns.message.QueryMessage], Optional[Answer]]: - """Get the next request to send, and check the cache. - - Returns a (request, answer) tuple. At most one of request or - answer will not be None. - """ - - # We return a tuple instead of Union[Message,Answer] as it lets - # the caller avoid isinstance(). - - while len(self.qnames) > 0: - self.qname = self.qnames.pop(0) - - # Do we know the answer? - if self.resolver.cache: - answer = self.resolver.cache.get( - (self.qname, self.rdtype, self.rdclass) - ) - if answer is not None: - if answer.rrset is None and self.raise_on_no_answer: - raise NoAnswer(response=answer.response) - else: - return (None, answer) - answer = self.resolver.cache.get( - (self.qname, dns.rdatatype.ANY, self.rdclass) - ) - if answer is not None and answer.response.rcode() == dns.rcode.NXDOMAIN: - # cached NXDOMAIN; record it and continue to next - # name. 
- self.nxdomain_responses[self.qname] = answer.response - continue - - # Build the request - request = dns.message.make_query(self.qname, self.rdtype, self.rdclass) - if self.resolver.keyname is not None: - request.use_tsig( - self.resolver.keyring, - self.resolver.keyname, - algorithm=self.resolver.keyalgorithm, - ) - request.use_edns( - self.resolver.edns, - self.resolver.ednsflags, - self.resolver.payload, - options=self.resolver.ednsoptions, - ) - if self.resolver.flags is not None: - request.flags = self.resolver.flags - - self.nameservers = self.resolver._enrich_nameservers( - self.resolver._nameservers, - self.resolver.nameserver_ports, - self.resolver.port, - ) - if self.resolver.rotate: - random.shuffle(self.nameservers) - self.current_nameservers = self.nameservers[:] - self.errors = [] - self.nameserver = None - self.tcp_attempt = False - self.retry_with_tcp = False - self.request = request - self.backoff = 0.10 - - return (request, None) - - # - # We've tried everything and only gotten NXDOMAINs. (We know - # it's only NXDOMAINs as anything else would have returned - # before now.) - # - raise NXDOMAIN(qnames=self.qnames_to_try, responses=self.nxdomain_responses) - - def next_nameserver(self) -> Tuple[dns.nameserver.Nameserver, bool, float]: - if self.retry_with_tcp: - assert self.nameserver is not None - assert not self.nameserver.is_always_max_size() - self.tcp_attempt = True - self.retry_with_tcp = False - return (self.nameserver, True, 0) - - backoff = 0.0 - if not self.current_nameservers: - if len(self.nameservers) == 0: - # Out of things to try! - raise NoNameservers(request=self.request, errors=self.errors) - self.current_nameservers = self.nameservers[:] - backoff = self.backoff - self.backoff = min(self.backoff * 2, 2) - - self.nameserver = self.current_nameservers.pop(0) - self.tcp_attempt = self.tcp or self.nameserver.is_always_max_size() - return (self.nameserver, self.tcp_attempt, backoff) - - def query_result( - self, response: Optional[dns.message.Message], ex: Optional[Exception] - ) -> Tuple[Optional[Answer], bool]: - # - # returns an (answer: Answer, end_loop: bool) tuple. - # - assert self.nameserver is not None - if ex: - # Exception during I/O or from_wire() - assert response is None - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - ex, - response, - ) - ) - if ( - isinstance(ex, dns.exception.FormError) - or isinstance(ex, EOFError) - or isinstance(ex, OSError) - or isinstance(ex, NotImplementedError) - ): - # This nameserver is no good, take it out of the mix. - self.nameservers.remove(self.nameserver) - elif isinstance(ex, dns.message.Truncated): - if self.tcp_attempt: - # Truncation with TCP is no good! - self.nameservers.remove(self.nameserver) - else: - self.retry_with_tcp = True - return (None, False) - # We got an answer! - assert response is not None - assert isinstance(response, dns.message.QueryMessage) - rcode = response.rcode() - if rcode == dns.rcode.NOERROR: - try: - answer = Answer( - self.qname, - self.rdtype, - self.rdclass, - response, - self.nameserver.answer_nameserver(), - self.nameserver.answer_port(), - ) - except Exception as e: - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - e, - response, - ) - ) - # The nameserver is no good, take it out of the mix. 
- self.nameservers.remove(self.nameserver) - return (None, False) - if self.resolver.cache: - self.resolver.cache.put((self.qname, self.rdtype, self.rdclass), answer) - if answer.rrset is None and self.raise_on_no_answer: - raise NoAnswer(response=answer.response) - return (answer, True) - elif rcode == dns.rcode.NXDOMAIN: - # Further validate the response by making an Answer, even - # if we aren't going to cache it. - try: - answer = Answer( - self.qname, dns.rdatatype.ANY, dns.rdataclass.IN, response - ) - except Exception as e: - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - e, - response, - ) - ) - # The nameserver is no good, take it out of the mix. - self.nameservers.remove(self.nameserver) - return (None, False) - self.nxdomain_responses[self.qname] = response - if self.resolver.cache: - self.resolver.cache.put( - (self.qname, dns.rdatatype.ANY, self.rdclass), answer - ) - # Make next_nameserver() return None, so caller breaks its - # inner loop and calls next_request(). - return (None, True) - elif rcode == dns.rcode.YXDOMAIN: - yex = YXDOMAIN() - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - yex, - response, - ) - ) - raise yex - else: - # - # We got a response, but we're not happy with the - # rcode in it. - # - if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: - self.nameservers.remove(self.nameserver) - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - dns.rcode.to_text(rcode), - response, - ) - ) - return (None, False) - - -class BaseResolver: - """DNS stub resolver.""" - - # We initialize in reset() - # - # pylint: disable=attribute-defined-outside-init - - domain: dns.name.Name - nameserver_ports: Dict[str, int] - port: int - search: List[dns.name.Name] - use_search_by_default: bool - timeout: float - lifetime: float - keyring: Optional[Any] - keyname: Optional[Union[dns.name.Name, str]] - keyalgorithm: Union[dns.name.Name, str] - edns: int - ednsflags: int - ednsoptions: Optional[List[dns.edns.Option]] - payload: int - cache: Any - flags: Optional[int] - retry_servfail: bool - rotate: bool - ndots: Optional[int] - _nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] - - def __init__( - self, filename: str = "/etc/resolv.conf", configure: bool = True - ) -> None: - """*filename*, a ``str`` or file object, specifying a file - in standard /etc/resolv.conf format. This parameter is meaningful - only when *configure* is true and the platform is POSIX. - - *configure*, a ``bool``. If True (the default), the resolver - instance is configured in the normal fashion for the operating - system the resolver is running on. (I.e. by reading a - /etc/resolv.conf file on POSIX systems and from the registry - on Windows systems.) 
- """ - - self.reset() - if configure: - if sys.platform == "win32": # pragma: no cover - self.read_registry() - elif filename: - self.read_resolv_conf(filename) - - def reset(self) -> None: - """Reset all resolver configuration to the defaults.""" - - self.domain = dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) - if len(self.domain) == 0: # pragma: no cover - self.domain = dns.name.root - self._nameservers = [] - self.nameserver_ports = {} - self.port = 53 - self.search = [] - self.use_search_by_default = False - self.timeout = 2.0 - self.lifetime = 5.0 - self.keyring = None - self.keyname = None - self.keyalgorithm = dns.tsig.default_algorithm - self.edns = -1 - self.ednsflags = 0 - self.ednsoptions = None - self.payload = 0 - self.cache = None - self.flags = None - self.retry_servfail = False - self.rotate = False - self.ndots = None - - def read_resolv_conf(self, f: Any) -> None: - """Process *f* as a file in the /etc/resolv.conf format. If f is - a ``str``, it is used as the name of the file to open; otherwise it - is treated as the file itself. - - Interprets the following items: - - - nameserver - name server IP address - - - domain - local domain name - - - search - search list for host-name lookup - - - options - supported options are rotate, timeout, edns0, and ndots - - """ - - nameservers = [] - if isinstance(f, str): - try: - cm: contextlib.AbstractContextManager = open(f) - except OSError: - # /etc/resolv.conf doesn't exist, can't be read, etc. - raise NoResolverConfiguration(f"cannot open {f}") - else: - cm = contextlib.nullcontext(f) - with cm as f: - for l in f: - if len(l) == 0 or l[0] == "#" or l[0] == ";": - continue - tokens = l.split() - - # Any line containing less than 2 tokens is malformed - if len(tokens) < 2: - continue - - if tokens[0] == "nameserver": - nameservers.append(tokens[1]) - elif tokens[0] == "domain": - self.domain = dns.name.from_text(tokens[1]) - # domain and search are exclusive - self.search = [] - elif tokens[0] == "search": - # the last search wins - self.search = [] - for suffix in tokens[1:]: - self.search.append(dns.name.from_text(suffix)) - # We don't set domain as it is not used if - # len(self.search) > 0 - elif tokens[0] == "options": - for opt in tokens[1:]: - if opt == "rotate": - self.rotate = True - elif opt == "edns0": - self.use_edns() - elif "timeout" in opt: - try: - self.timeout = int(opt.split(":")[1]) - except (ValueError, IndexError): - pass - elif "ndots" in opt: - try: - self.ndots = int(opt.split(":")[1]) - except (ValueError, IndexError): - pass - if len(nameservers) == 0: - raise NoResolverConfiguration("no nameservers") - # Assigning directly instead of appending means we invoke the - # setter logic, with additonal checking and enrichment. - self.nameservers = nameservers - - def read_registry(self) -> None: # pragma: no cover - """Extract resolver configuration from the Windows registry.""" - try: - info = dns.win32util.get_dns_info() # type: ignore - if info.domain is not None: - self.domain = info.domain - self.nameservers = info.nameservers - self.search = info.search - except AttributeError: - raise NotImplementedError - - def _compute_timeout( - self, - start: float, - lifetime: Optional[float] = None, - errors: Optional[List[ErrorTuple]] = None, - ) -> float: - lifetime = self.lifetime if lifetime is None else lifetime - now = time.time() - duration = now - start - if errors is None: - errors = [] - if duration < 0: - if duration < -1: - # Time going backwards is bad. Just give up. 
- raise LifetimeTimeout(timeout=duration, errors=errors) - else: - # Time went backwards, but only a little. This can - # happen, e.g. under vmware with older linux kernels. - # Pretend it didn't happen. - duration = 0 - if duration >= lifetime: - raise LifetimeTimeout(timeout=duration, errors=errors) - return min(lifetime - duration, self.timeout) - - def _get_qnames_to_try( - self, qname: dns.name.Name, search: Optional[bool] - ) -> List[dns.name.Name]: - # This is a separate method so we can unit test the search - # rules without requiring the Internet. - if search is None: - search = self.use_search_by_default - qnames_to_try = [] - if qname.is_absolute(): - qnames_to_try.append(qname) - else: - abs_qname = qname.concatenate(dns.name.root) - if search: - if len(self.search) > 0: - # There is a search list, so use it exclusively - search_list = self.search[:] - elif self.domain != dns.name.root and self.domain is not None: - # We have some notion of a domain that isn't the root, so - # use it as the search list. - search_list = [self.domain] - else: - search_list = [] - # Figure out the effective ndots (default is 1) - if self.ndots is None: - ndots = 1 - else: - ndots = self.ndots - for suffix in search_list: - qnames_to_try.append(qname + suffix) - if len(qname) > ndots: - # The name has at least ndots dots, so we should try an - # absolute query first. - qnames_to_try.insert(0, abs_qname) - else: - # The name has less than ndots dots, so we should search - # first, then try the absolute name. - qnames_to_try.append(abs_qname) - else: - qnames_to_try.append(abs_qname) - return qnames_to_try - - def use_tsig( - self, - keyring: Any, - keyname: Optional[Union[dns.name.Name, str]] = None, - algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - ) -> None: - """Add a TSIG signature to each query. - - The parameters are passed to ``dns.message.Message.use_tsig()``; - see its documentation for details. - """ - - self.keyring = keyring - self.keyname = keyname - self.keyalgorithm = algorithm - - def use_edns( - self, - edns: Optional[Union[int, bool]] = 0, - ednsflags: int = 0, - payload: int = dns.message.DEFAULT_EDNS_PAYLOAD, - options: Optional[List[dns.edns.Option]] = None, - ) -> None: - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying - ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case - the other parameters are ignored. Specifying ``True`` is - equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - """ - - if edns is None or edns is False: - edns = -1 - elif edns is True: - edns = 0 - self.edns = edns - self.ednsflags = ednsflags - self.payload = payload - self.ednsoptions = options - - def set_flags(self, flags: int) -> None: - """Overrides the default flags with your own. - - *flags*, an ``int``, the message flags to use. 
- """ - - self.flags = flags - - @classmethod - def _enrich_nameservers( - cls, - nameservers: Sequence[Union[str, dns.nameserver.Nameserver]], - nameserver_ports: Dict[str, int], - default_port: int, - ) -> List[dns.nameserver.Nameserver]: - enriched_nameservers = [] - if isinstance(nameservers, list): - for nameserver in nameservers: - enriched_nameserver: dns.nameserver.Nameserver - if isinstance(nameserver, dns.nameserver.Nameserver): - enriched_nameserver = nameserver - elif dns.inet.is_address(nameserver): - port = nameserver_ports.get(nameserver, default_port) - enriched_nameserver = dns.nameserver.Do53Nameserver( - nameserver, port - ) - else: - try: - if urlparse(nameserver).scheme != "https": - raise NotImplementedError - except Exception: - raise ValueError( - f"nameserver {nameserver} is not a " - "dns.nameserver.Nameserver instance or text form, " - "IP address, nor a valid https URL" - ) - enriched_nameserver = dns.nameserver.DoHNameserver(nameserver) - enriched_nameservers.append(enriched_nameserver) - else: - raise ValueError( - f"nameservers must be a list or tuple (not a {type(nameservers)})" - ) - return enriched_nameservers - - @property - def nameservers( - self, - ) -> Sequence[Union[str, dns.nameserver.Nameserver]]: - return self._nameservers - - @nameservers.setter - def nameservers( - self, nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] - ) -> None: - """ - *nameservers*, a ``list`` of nameservers, where a nameserver is either - a string interpretable as a nameserver, or a ``dns.nameserver.Nameserver`` - instance. - - Raises ``ValueError`` if *nameservers* is not a list of nameservers. - """ - # We just call _enrich_nameservers() for checking - self._enrich_nameservers(nameservers, self.nameserver_ports, self.port) - self._nameservers = nameservers - - -class Resolver(BaseResolver): - """DNS stub resolver.""" - - def resolve( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - ) -> Answer: # pylint: disable=arguments-differ - """Query nameservers to find the answer to the question. - - The *qname*, *rdtype*, and *rdclass* parameters may be objects - of the appropriate type, or strings that can be converted into objects - of the appropriate type. - - *qname*, a ``dns.name.Name`` or ``str``, the query name. - - *rdtype*, an ``int`` or ``str``, the query type. - - *rdclass*, an ``int`` or ``str``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *source*, a ``str`` or ``None``. If not ``None``, bind to this IP - address when making queries. - - *raise_on_no_answer*, a ``bool``. If ``True``, raise - ``dns.resolver.NoAnswer`` if there's no answer to the question. - - *source_port*, an ``int``, the port from which to send the message. - - *lifetime*, a ``float``, how many seconds a query should run - before timing out. - - *search*, a ``bool`` or ``None``, determines whether the - search list configured in the system's resolver configuration - are used for relative names, and whether the resolver's domain - may be added to relative names. The default is ``None``, - which causes the value of the resolver's - ``use_search_by_default`` attribute to be used. 
- - Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found - in the specified lifetime. - - Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. - - Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after - DNAME substitution. - - Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is - ``True`` and the query name exists but has no RRset of the - desired type and class. - - Raises ``dns.resolver.NoNameservers`` if no non-broken - nameservers are available to answer the question. - - Returns a ``dns.resolver.Answer`` instance. - - """ - - resolution = _Resolution( - self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search - ) - start = time.time() - while True: - (request, answer) = resolution.next_request() - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - # cache hit! - return answer - assert request is not None # needed for type checking - done = False - while not done: - (nameserver, tcp, backoff) = resolution.next_nameserver() - if backoff: - time.sleep(backoff) - timeout = self._compute_timeout(start, lifetime, resolution.errors) - try: - response = nameserver.query( - request, - timeout=timeout, - source=source, - source_port=source_port, - max_size=tcp, - ) - except Exception as ex: - (_, done) = resolution.query_result(None, ex) - continue - (answer, done) = resolution.query_result(response, None) - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - return answer - - def query( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - ) -> Answer: # pragma: no cover - """Query nameservers to find the answer to the question. - - This method calls resolve() with ``search=True``, and is - provided for backwards compatibility with prior versions of - dnspython. See the documentation for the resolve() method for - further details. - """ - warnings.warn( - "please use dns.resolver.Resolver.resolve() instead", - DeprecationWarning, - stacklevel=2, - ) - return self.resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - True, - ) - - def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Any) -> Answer: - """Use a resolver to run a reverse query for PTR records. - - This utilizes the resolve() method to perform a PTR lookup on the - specified IP address. - - *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get - the PTR record for. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. 
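From the caller's side, the resolution loop above surfaces as resolve() plus a family of exceptions; note that the same `is not None` caution applies to user code, since Answer defines __len__. A sketch (needs network; example.com is arbitrary):

    import dns.resolver

    answer = None
    try:
        answer = dns.resolver.resolve("example.com", "TXT", lifetime=3.0)
    except dns.resolver.NXDOMAIN:
        print("name does not exist")
    except dns.resolver.NoAnswer:
        print("name exists, but has no TXT rdataset")
    except dns.resolver.LifetimeTimeout:
        print("gave up after 3 seconds")
    if answer is not None:        # never `if answer:` -- Answer has __len__
        print(len(answer), "TXT records")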
-        modified_kwargs: Dict[str, Any] = {}
-        modified_kwargs.update(kwargs)
-        modified_kwargs["rdtype"] = dns.rdatatype.PTR
-        modified_kwargs["rdclass"] = dns.rdataclass.IN
-        return self.resolve(
-            dns.reversename.from_address(ipaddr), *args, **modified_kwargs
-        )
-
-    def resolve_name(
-        self,
-        name: Union[dns.name.Name, str],
-        family: int = socket.AF_UNSPEC,
-        **kwargs: Any,
-    ) -> HostAnswers:
-        """Use a resolver to query for address records.
-
-        This utilizes the resolve() method to perform A and/or AAAA lookups on
-        the specified name.
-
-        *qname*, a ``dns.name.Name`` or ``str``, the name to resolve.
-
-        *family*, an ``int``, the address family. If socket.AF_UNSPEC
-        (the default), both A and AAAA records will be retrieved.
-
-        All other arguments that can be passed to the resolve() function
-        except for rdtype and rdclass are also supported by this
-        function.
-        """
-        # We make a modified kwargs for type checking happiness, as otherwise
-        # we get a legit warning about possibly having rdtype and rdclass
-        # in the kwargs more than once.
-        modified_kwargs: Dict[str, Any] = {}
-        modified_kwargs.update(kwargs)
-        modified_kwargs.pop("rdtype", None)
-        modified_kwargs["rdclass"] = dns.rdataclass.IN
-
-        if family == socket.AF_INET:
-            v4 = self.resolve(name, dns.rdatatype.A, **modified_kwargs)
-            return HostAnswers.make(v4=v4)
-        elif family == socket.AF_INET6:
-            v6 = self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
-            return HostAnswers.make(v6=v6)
-        elif family != socket.AF_UNSPEC:  # pragma: no cover
-            raise NotImplementedError(f"unknown address family {family}")
-
-        raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
-        lifetime = modified_kwargs.pop("lifetime", None)
-        start = time.time()
-        v6 = self.resolve(
-            name,
-            dns.rdatatype.AAAA,
-            raise_on_no_answer=False,
-            lifetime=self._compute_timeout(start, lifetime),
-            **modified_kwargs,
-        )
-        # Note that setting name ensures we query the same name
-        # for A as we did for AAAA. (This is just in case search lists
-        # are active by default in the resolver configuration and
-        # we might be talking to a server that says NXDOMAIN when it
-        # wants to say NOERROR no data.)
-        name = v6.qname
-        v4 = self.resolve(
-            name,
-            dns.rdatatype.A,
-            raise_on_no_answer=False,
-            lifetime=self._compute_timeout(start, lifetime),
-            **modified_kwargs,
-        )
-        answers = HostAnswers.make(v6=v6, v4=v4, add_empty=not raise_on_no_answer)
-        if not answers:
-            raise NoAnswer(response=v6.response)
-        return answers
-
-    # pylint: disable=redefined-outer-name
-
-    def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
-        """Determine the canonical name of *name*.
-
-        The canonical name is the name the resolver uses for queries
-        after all CNAME and DNAME renamings have been applied.
-
-        *name*, a ``dns.name.Name`` or ``str``, the query name.
-
-        This method can raise any exception that ``resolve()`` can
-        raise, other than ``dns.resolver.NoAnswer`` and
-        ``dns.resolver.NXDOMAIN``.
-
-        Returns a ``dns.name.Name``.
-        """
-        try:
-            answer = self.resolve(name, raise_on_no_answer=False)
-            canonical_name = answer.canonical_name
-        except dns.resolver.NXDOMAIN as e:
-            canonical_name = e.canonical_name
-        return canonical_name
-
-    # pylint: enable=redefined-outer-name
-
-    def try_ddr(self, lifetime: float = 5.0) -> None:
-        """Try to update the resolver's nameservers using Discovery of Designated
-        Resolvers (DDR). If successful, the resolver will subsequently use
-        DNS-over-HTTPS or DNS-over-TLS for future queries.
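A short sketch of the two convenience methods defined above (network required; the names and addresses are examples):

    import dns.resolver

    res = dns.resolver.Resolver()
    hosts = res.resolve_name("dns.google")   # A and AAAA in one call
    print(list(hosts.addresses()))           # IPv6 then IPv4 addresses
    ptr = res.resolve_address("8.8.8.8")     # PTR lookup via dns.reversename
    print(ptr[0].target)                     # e.g. dns.google.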
-
-        *lifetime*, a float, is the maximum time to spend attempting DDR. The default
-        is 5 seconds.
-
-        If the SVCB query is successful and results in a non-empty list of nameservers,
-        then the resolver's nameservers are set to the returned servers in priority
-        order.
-
-        The current implementation does not use any address hints from the SVCB record,
-        nor does it resolve addresses for the SVCB target name, rather it assumes that
-        the bootstrap nameserver will always be one of the addresses and uses it.
-        A future revision to the code may offer fuller support. The code verifies that
-        the bootstrap nameserver is in the Subject Alternative Name field of the
-        TLS certificate.
-        """
-        try:
-            expiration = time.time() + lifetime
-            answer = self.resolve(
-                dns._ddr._local_resolver_name, "SVCB", lifetime=lifetime
-            )
-            timeout = dns.query._remaining(expiration)
-            nameservers = dns._ddr._get_nameservers_sync(answer, timeout)
-            if len(nameservers) > 0:
-                self.nameservers = nameservers
-        except Exception:  # pragma: no cover
-            pass
-
-
-#: The default resolver.
-default_resolver: Optional[Resolver] = None
-
-
-def get_default_resolver() -> Resolver:
-    """Get the default resolver, initializing it if necessary."""
-    if default_resolver is None:
-        reset_default_resolver()
-    assert default_resolver is not None
-    return default_resolver
-
-
-def reset_default_resolver() -> None:
-    """Re-initialize default resolver.
-
-    Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX
-    systems) will be re-read immediately.
-    """
-
-    global default_resolver
-    default_resolver = Resolver()
-
-
-def resolve(
-    qname: Union[dns.name.Name, str],
-    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
-    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
-    tcp: bool = False,
-    source: Optional[str] = None,
-    raise_on_no_answer: bool = True,
-    source_port: int = 0,
-    lifetime: Optional[float] = None,
-    search: Optional[bool] = None,
-) -> Answer:  # pragma: no cover
-    """Query nameservers to find the answer to the question.
-
-    This is a convenience function that uses the default resolver
-    object to make the query.
-
-    See ``dns.resolver.Resolver.resolve`` for more information on the
-    parameters.
-    """
-
-    return get_default_resolver().resolve(
-        qname,
-        rdtype,
-        rdclass,
-        tcp,
-        source,
-        raise_on_no_answer,
-        source_port,
-        lifetime,
-        search,
-    )
-
-
-def query(
-    qname: Union[dns.name.Name, str],
-    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
-    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
-    tcp: bool = False,
-    source: Optional[str] = None,
-    raise_on_no_answer: bool = True,
-    source_port: int = 0,
-    lifetime: Optional[float] = None,
-) -> Answer:  # pragma: no cover
-    """Query nameservers to find the answer to the question.
-
-    This method calls resolve() with ``search=True``, and is
-    provided for backwards compatibility with prior versions of
-    dnspython. See the documentation for the resolve() method for
-    further details.
-    """
-    warnings.warn(
-        "please use dns.resolver.resolve() instead", DeprecationWarning, stacklevel=2
-    )
-    return resolve(
-        qname,
-        rdtype,
-        rdclass,
-        tcp,
-        source,
-        raise_on_no_answer,
-        source_port,
-        lifetime,
-        True,
-    )
-
-
-def resolve_address(ipaddr: str, *args: Any, **kwargs: Any) -> Answer:
-    """Use a resolver to run a reverse query for PTR records.
-
-    See ``dns.resolver.Resolver.resolve_address`` for more information on the
-    parameters.
- """ - - return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) - - -def resolve_name( - name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any -) -> HostAnswers: - """Use a resolver to query for address records. - - See ``dns.resolver.Resolver.resolve_name`` for more information on the - parameters. - """ - - return get_default_resolver().resolve_name(name, family, **kwargs) - - -def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: - """Determine the canonical name of *name*. - - See ``dns.resolver.Resolver.canonical_name`` for more information on the - parameters and possible exceptions. - """ - - return get_default_resolver().canonical_name(name) - - -def try_ddr(lifetime: float = 5.0) -> None: # pragma: no cover - """Try to update the default resolver's nameservers using Discovery of Designated - Resolvers (DDR). If successful, the resolver will subsequently use - DNS-over-HTTPS or DNS-over-TLS for future queries. - - See :py:func:`dns.resolver.Resolver.try_ddr` for more information. - """ - return get_default_resolver().try_ddr(lifetime) - - -def zone_for_name( - name: Union[dns.name.Name, str], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - tcp: bool = False, - resolver: Optional[Resolver] = None, - lifetime: Optional[float] = None, -) -> dns.name.Name: - """Find the name of the zone which contains the specified name. - - *name*, an absolute ``dns.name.Name`` or ``str``, the query name. - - *rdclass*, an ``int``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - If ``None``, the default, then the default resolver is used. - - *lifetime*, a ``float``, the total time to allow for the queries needed - to determine the zone. If ``None``, the default, then only the individual - query limits of the resolver apply. - - Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS - root. (This is only likely to happen if you're using non-default - root servers in your network and they are misconfigured.) - - Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be - found in the allotted lifetime. - - Returns a ``dns.name.Name``. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, dns.name.root) - if resolver is None: - resolver = get_default_resolver() - if not name.is_absolute(): - raise NotAbsolute(name) - start = time.time() - expiration: Optional[float] - if lifetime is not None: - expiration = start + lifetime - else: - expiration = None - while 1: - try: - rlifetime: Optional[float] - if expiration is not None: - rlifetime = expiration - time.time() - if rlifetime <= 0: - rlifetime = 0 - else: - rlifetime = None - answer = resolver.resolve( - name, dns.rdatatype.SOA, rdclass, tcp, lifetime=rlifetime - ) - assert answer.rrset is not None - if answer.rrset.name == name: - return name - # otherwise we were CNAMEd or DNAMEd and need to look higher - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e: - if isinstance(e, dns.resolver.NXDOMAIN): - response = e.responses().get(name) - else: - response = e.response() # pylint: disable=no-value-for-parameter - if response: - for rrs in response.authority: - if rrs.rdtype == dns.rdatatype.SOA and rrs.rdclass == rdclass: - (nr, _, _) = rrs.name.fullcompare(name) - if nr == dns.name.NAMERELN_SUPERDOMAIN: - # We're doing a proper superdomain check as - # if the name were equal we ought to have gotten - # it in the answer section! 
We are ignoring the - # possibility that the authority is insane and - # is including multiple SOA RRs for different - # authorities. - return rrs.name - # we couldn't extract anything useful from the response (e.g. it's - # a type 3 NXDOMAIN) - try: - name = name.parent() - except dns.name.NoParent: - raise NoRootSOA - - -def make_resolver_at( - where: Union[dns.name.Name, str], - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Resolver: - """Make a stub resolver using the specified destination as the full resolver. - - *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the - full resolver. - - *port*, an ``int``, the port to use. If not specified, the default is 53. - - *family*, an ``int``, the address family to use. This parameter is used if - *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case - the first address returned by ``resolve_name()`` will be used, otherwise the - first address of the specified family will be used. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for - resolution of hostnames. If not specified, the default resolver will be used. - - Returns a ``dns.resolver.Resolver`` or raises an exception. - """ - if resolver is None: - resolver = get_default_resolver() - nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] - if isinstance(where, str) and dns.inet.is_address(where): - nameservers.append(dns.nameserver.Do53Nameserver(where, port)) - else: - for address in resolver.resolve_name(where, family).addresses(): - nameservers.append(dns.nameserver.Do53Nameserver(address, port)) - res = dns.resolver.Resolver(configure=False) - res.nameservers = nameservers - return res - - -def resolve_at( - where: Union[dns.name.Name, str], - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Answer: - """Query nameservers to find the answer to the question. - - This is a convenience function that calls ``dns.resolver.make_resolver_at()`` to - make a resolver, and then uses it to resolve the query. - - See ``dns.resolver.Resolver.resolve`` for more information on the resolution - parameters, and ``dns.resolver.make_resolver_at`` for information about the resolver - parameters *where*, *port*, *family*, and *resolver*. - - If making more than one query, it is more efficient to call - ``dns.resolver.make_resolver_at()`` and then use that resolver for the queries - instead of calling ``resolve_at()`` multiple times. - """ - return make_resolver_at(where, port, family, resolver).resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - ) - - -# -# Support for overriding the system resolver for all python code in the -# running process. 
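Before moving on to the socket shims, a sketch of the helpers above (network required; the server and names are examples; make_resolver_at() and resolve_at() are the forms shipped in this dnspython version):

    import dns.resolver

    print(dns.resolver.zone_for_name("www.dnspython.org"))   # dnspython.org.

    res = dns.resolver.make_resolver_at("9.9.9.9")           # reusable stub resolver
    print(res.resolve("example.com", "A").rrset)
    # one-shot equivalent (re-creates the resolver on every call):
    print(dns.resolver.resolve_at("9.9.9.9", "example.com", "A").rrset)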
-# - -_protocols_for_socktype = { - socket.SOCK_DGRAM: [socket.SOL_UDP], - socket.SOCK_STREAM: [socket.SOL_TCP], -} - -_resolver = None -_original_getaddrinfo = socket.getaddrinfo -_original_getnameinfo = socket.getnameinfo -_original_getfqdn = socket.getfqdn -_original_gethostbyname = socket.gethostbyname -_original_gethostbyname_ex = socket.gethostbyname_ex -_original_gethostbyaddr = socket.gethostbyaddr - - -def _getaddrinfo( - host=None, service=None, family=socket.AF_UNSPEC, socktype=0, proto=0, flags=0 -): - if flags & socket.AI_NUMERICHOST != 0: - # Short circuit directly into the system's getaddrinfo(). We're - # not adding any value in this case, and this avoids infinite loops - # because dns.query.* needs to call getaddrinfo() for IPv6 scoping - # reasons. We will also do this short circuit below if we - # discover that the host is an address literal. - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: - # Not implemented. We raise a gaierror as opposed to a - # NotImplementedError as it helps callers handle errors more - # appropriately. [Issue #316] - # - # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is - # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for - # EAI_BADFLAGS as the flags aren't bad, we just don't - # implement them. - raise socket.gaierror( - socket.EAI_FAIL, "Non-recoverable failure in name resolution" - ) - if host is None and service is None: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - addrs = [] - canonical_name = None # pylint: disable=redefined-outer-name - # Is host None or an address literal? If so, use the system's - # getaddrinfo(). - if host is None: - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - try: - # We don't care about the result of af_for_address(), we're just - # calling it so it raises an exception if host is not an IPv4 or - # IPv6 address. - dns.inet.af_for_address(host) - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - except Exception: - pass - # Something needs resolution! - try: - answers = _resolver.resolve_name(host, family) - addrs = answers.addresses_and_families() - canonical_name = answers.canonical_name().to_text(True) - except dns.resolver.NXDOMAIN: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - except Exception: - # We raise EAI_AGAIN here as the failure may be temporary - # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. - # [Issue #416] - raise socket.gaierror(socket.EAI_AGAIN, "Temporary failure in name resolution") - port = None - try: - # Is it a port literal? 
- if service is None: - port = 0 - else: - port = int(service) - except Exception: - if flags & socket.AI_NUMERICSERV == 0: - try: - port = socket.getservbyname(service) - except Exception: - pass - if port is None: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - tuples = [] - if socktype == 0: - socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] - else: - socktypes = [socktype] - if flags & socket.AI_CANONNAME != 0: - cname = canonical_name - else: - cname = "" - for addr, af in addrs: - for socktype in socktypes: - for proto in _protocols_for_socktype[socktype]: - addr_tuple = dns.inet.low_level_address_tuple((addr, port), af) - tuples.append((af, socktype, proto, cname, addr_tuple)) - if len(tuples) == 0: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - return tuples - - -def _getnameinfo(sockaddr, flags=0): - host = sockaddr[0] - port = sockaddr[1] - if len(sockaddr) == 4: - scope = sockaddr[3] - family = socket.AF_INET6 - else: - scope = None - family = socket.AF_INET - tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) - if len(tuples) > 1: - raise OSError("sockaddr resolved to multiple addresses") - addr = tuples[0][4][0] - if flags & socket.NI_DGRAM: - pname = "udp" - else: - pname = "tcp" - qname = dns.reversename.from_address(addr) - if flags & socket.NI_NUMERICHOST == 0: - try: - answer = _resolver.resolve(qname, "PTR") - hostname = answer.rrset[0].target.to_text(True) - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): - if flags & socket.NI_NAMEREQD: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - hostname = addr - if scope is not None: - hostname += "%" + str(scope) - else: - hostname = addr - if scope is not None: - hostname += "%" + str(scope) - if flags & socket.NI_NUMERICSERV: - service = str(port) - else: - service = socket.getservbyport(port, pname) - return (hostname, service) - - -def _getfqdn(name=None): - if name is None: - name = socket.gethostname() - try: - (name, _, _) = _gethostbyaddr(name) - # Python's version checks aliases too, but our gethostbyname - # ignores them, so we do so here as well. - except Exception: # pragma: no cover - pass - return name - - -def _gethostbyname(name): - return _gethostbyname_ex(name)[2][0] - - -def _gethostbyname_ex(name): - aliases = [] - addresses = [] - tuples = _getaddrinfo( - name, 0, socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME - ) - canonical = tuples[0][3] - for item in tuples: - addresses.append(item[4][0]) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def _gethostbyaddr(ip): - try: - dns.ipv6.inet_aton(ip) - sockaddr = (ip, 80, 0, 0) - family = socket.AF_INET6 - except Exception: - try: - dns.ipv4.inet_aton(ip) - except Exception: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - sockaddr = (ip, 80) - family = socket.AF_INET - (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) - aliases = [] - addresses = [] - tuples = _getaddrinfo( - name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME - ) - canonical = tuples[0][3] - # We only want to include an address from the tuples if it's the - # same as the one we asked about. We do this comparison in binary - # to avoid any differences in text representations. 
- bin_ip = dns.inet.inet_pton(family, ip) - for item in tuples: - addr = item[4][0] - bin_addr = dns.inet.inet_pton(family, addr) - if bin_ip == bin_addr: - addresses.append(addr) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def override_system_resolver(resolver: Optional[Resolver] = None) -> None: - """Override the system resolver routines in the socket module with - versions which use dnspython's resolver. - - This can be useful in testing situations where you want to control - the resolution behavior of python code without having to change - the system's resolver settings (e.g. /etc/resolv.conf). - - The resolver to use may be specified; if it's not, the default - resolver will be used. - - resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - """ - - if resolver is None: - resolver = get_default_resolver() - global _resolver - _resolver = resolver - socket.getaddrinfo = _getaddrinfo - socket.getnameinfo = _getnameinfo - socket.getfqdn = _getfqdn - socket.gethostbyname = _gethostbyname - socket.gethostbyname_ex = _gethostbyname_ex - socket.gethostbyaddr = _gethostbyaddr - - -def restore_system_resolver() -> None: - """Undo the effects of prior override_system_resolver().""" - - global _resolver - _resolver = None - socket.getaddrinfo = _original_getaddrinfo - socket.getnameinfo = _original_getnameinfo - socket.getfqdn = _original_getfqdn - socket.gethostbyname = _original_gethostbyname - socket.gethostbyname_ex = _original_gethostbyname_ex - socket.gethostbyaddr = _original_gethostbyaddr diff --git a/venv/lib/python3.12/site-packages/dns/reversename.py b/venv/lib/python3.12/site-packages/dns/reversename.py deleted file mode 100644 index 8236c711..00000000 --- a/venv/lib/python3.12/site-packages/dns/reversename.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Reverse Map Names.""" - -import binascii - -import dns.ipv4 -import dns.ipv6 -import dns.name - -ipv4_reverse_domain = dns.name.from_text("in-addr.arpa.") -ipv6_reverse_domain = dns.name.from_text("ip6.arpa.") - - -def from_address( - text: str, - v4_origin: dns.name.Name = ipv4_reverse_domain, - v6_origin: dns.name.Name = ipv6_reverse_domain, -) -> dns.name.Name: - """Convert an IPv4 or IPv6 address in textual form into a Name object whose - value is the reverse-map domain name of the address. - - *text*, a ``str``, is an IPv4 or IPv6 address in textual form - (e.g. '127.0.0.1', '::1') - - *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to - the address if the address is an IPv4 address, instead of the default - (in-addr.arpa.) 
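The override/restore pair defined above monkey-patches the socket module; a sketch of the intended usage in tests (9.9.9.9 and example.com are placeholders):

    import socket

    import dns.resolver

    res = dns.resolver.Resolver(configure=False)
    res.nameservers = ["9.9.9.9"]
    dns.resolver.override_system_resolver(res)      # patch socket.getaddrinfo() etc.
    try:
        print(socket.gethostbyname("example.com"))  # now answered by dnspython
    finally:
        dns.resolver.restore_system_resolver()      # always undo the patching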
- - *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to - the address if the address is an IPv6 address, instead of the default - (ip6.arpa.) - - Raises ``dns.exception.SyntaxError`` if the address is badly formed. - - Returns a ``dns.name.Name``. - """ - - try: - v6 = dns.ipv6.inet_aton(text) - if dns.ipv6.is_mapped(v6): - parts = ["%d" % byte for byte in v6[12:]] - origin = v4_origin - else: - parts = [x for x in str(binascii.hexlify(v6).decode())] - origin = v6_origin - except Exception: - parts = ["%d" % byte for byte in dns.ipv4.inet_aton(text)] - origin = v4_origin - return dns.name.from_text(".".join(reversed(parts)), origin=origin) - - -def to_address( - name: dns.name.Name, - v4_origin: dns.name.Name = ipv4_reverse_domain, - v6_origin: dns.name.Name = ipv6_reverse_domain, -) -> str: - """Convert a reverse map domain name into textual address form. - - *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name - form. - - *v4_origin*, a ``dns.name.Name`` representing the top-level domain for - IPv4 addresses, instead of the default (in-addr.arpa.) - - *v6_origin*, a ``dns.name.Name`` representing the top-level domain for - IPv4 addresses, instead of the default (ip6.arpa.) - - Raises ``dns.exception.SyntaxError`` if the name does not have a - reverse-map form. - - Returns a ``str``. - """ - - if name.is_subdomain(v4_origin): - name = name.relativize(v4_origin) - text = b".".join(reversed(name.labels)) - # run through inet_ntoa() to check syntax and make pretty. - return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) - elif name.is_subdomain(v6_origin): - name = name.relativize(v6_origin) - labels = list(reversed(name.labels)) - parts = [] - for i in range(0, len(labels), 4): - parts.append(b"".join(labels[i : i + 4])) - text = b":".join(parts) - # run through inet_ntoa() to check syntax and make pretty. - return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) - else: - raise dns.exception.SyntaxError("unknown reverse-map address family") diff --git a/venv/lib/python3.12/site-packages/dns/rrset.py b/venv/lib/python3.12/site-packages/dns/rrset.py deleted file mode 100644 index 6f39b108..00000000 --- a/venv/lib/python3.12/site-packages/dns/rrset.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS RRsets (an RRset is a named rdataset)""" - -from typing import Any, Collection, Dict, Optional, Union, cast - -import dns.name -import dns.rdataclass -import dns.rdataset -import dns.renderer - - -class RRset(dns.rdataset.Rdataset): - """A DNS RRset (named rdataset). - - RRset inherits from Rdataset, and RRsets can be treated as - Rdatasets in most cases. There are, however, a few notable - exceptions. 
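from_address() and to_address() above are inverses of each other; a small offline sketch:

    import dns.reversename

    n = dns.reversename.from_address("2001:db8::1")
    print(n)   # ...1.0.0.2.ip6.arpa. (one label per nibble, least significant first)
    print(dns.reversename.to_address(n))              # 2001:db8::1
    print(dns.reversename.from_address("127.0.0.1"))  # 1.0.0.127.in-addr.arpa.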
RRsets have different to_wire() and to_text() method
-    arguments, reflecting the fact that RRsets always have an owner
-    name.
-    """
-
-    __slots__ = ["name", "deleting"]
-
-    def __init__(
-        self,
-        name: dns.name.Name,
-        rdclass: dns.rdataclass.RdataClass,
-        rdtype: dns.rdatatype.RdataType,
-        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
-        deleting: Optional[dns.rdataclass.RdataClass] = None,
-    ):
-        """Create a new RRset."""
-
-        super().__init__(rdclass, rdtype, covers)
-        self.name = name
-        self.deleting = deleting
-
-    def _clone(self):
-        obj = super()._clone()
-        obj.name = self.name
-        obj.deleting = self.deleting
-        return obj
-
-    def __repr__(self):
-        if self.covers == 0:
-            ctext = ""
-        else:
-            ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
-        if self.deleting is not None:
-            dtext = " delete=" + dns.rdataclass.to_text(self.deleting)
-        else:
-            dtext = ""
-        return (
-            "<DNS "
-            + str(self.name)
-            + " "
-            + dns.rdataclass.to_text(self.rdclass)
-            + " "
-            + dns.rdatatype.to_text(self.rdtype)
-            + ctext
-            + dtext
-            + " RRset: "
-            + self._rdata_repr()
-            + ">"
-        )
-
-    def __str__(self):
-        return self.to_text()
-
-    def __eq__(self, other):
-        if isinstance(other, RRset):
-            if self.name != other.name:
-                return False
-        elif not isinstance(other, dns.rdataset.Rdataset):
-            return False
-        return super().__eq__(other)
-
-    def match(self, *args: Any, **kwargs: Any) -> bool:  # type: ignore[override]
-        """Does this rrset match the specified attributes?
-
-        Behaves as :py:func:`full_match()` if the first argument is a
-        ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()`
-        otherwise.
-
-        (This behavior fixes a design mistake where the signature of this
-        method became incompatible with that of its superclass. The fix
-        makes RRsets matchable as Rdatasets while preserving backwards
-        compatibility.)
-        """
-        if isinstance(args[0], dns.name.Name):
-            return self.full_match(*args, **kwargs)  # type: ignore[arg-type]
-        else:
-            return super().match(*args, **kwargs)  # type: ignore[arg-type]
-
-    def full_match(
-        self,
-        name: dns.name.Name,
-        rdclass: dns.rdataclass.RdataClass,
-        rdtype: dns.rdatatype.RdataType,
-        covers: dns.rdatatype.RdataType,
-        deleting: Optional[dns.rdataclass.RdataClass] = None,
-    ) -> bool:
-        """Returns ``True`` if this rrset matches the specified name, class,
-        type, covers, and deletion state.
-        """
-        if not super().match(rdclass, rdtype, covers):
-            return False
-        if self.name != name or self.deleting != deleting:
-            return False
-        return True
-
-    # pylint: disable=arguments-differ
-
-    def to_text(  # type: ignore[override]
-        self,
-        origin: Optional[dns.name.Name] = None,
-        relativize: bool = True,
-        **kw: Dict[str, Any],
-    ) -> str:
-        """Convert the RRset into DNS zone file format.
-
-        See ``dns.name.Name.choose_relativity`` for more information
-        on how *origin* and *relativize* determine the way names
-        are emitted.
-
-        Any additional keyword arguments are passed on to the rdata
-        ``to_text()`` method.
-
-        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
-        names.
-
-        *relativize*, a ``bool``. If ``True``, names will be relativized
-        to *origin*.
-        """
-
-        return super().to_text(
-            self.name, origin, relativize, self.deleting, **kw  # type: ignore
-        )
-
-    def to_wire(  # type: ignore[override]
-        self,
-        file: Any,
-        compress: Optional[dns.name.CompressType] = None,  # type: ignore
-        origin: Optional[dns.name.Name] = None,
-        **kw: Dict[str, Any],
-    ) -> int:
-        """Convert the RRset to wire format.
-
-        All keyword arguments are passed to ``dns.rdataset.to_wire()``; see
-        that function for details.
-
-        Returns an ``int``, the number of records emitted.
- """ - - return super().to_wire( - self.name, file, compress, origin, self.deleting, **kw # type:ignore - ) - - # pylint: enable=arguments-differ - - def to_rdataset(self) -> dns.rdataset.Rdataset: - """Convert an RRset into an Rdataset. - - Returns a ``dns.rdataset.Rdataset``. - """ - return dns.rdataset.from_rdata_list(self.ttl, list(self)) - - -def from_text_list( - name: Union[dns.name.Name, str], - ttl: int, - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - text_rdatas: Collection[str], - idna_codec: Optional[dns.name.IDNACodec] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> RRset: - """Create an RRset with the specified name, TTL, class, and type, and with - the specified list of rdatas in text format. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Returns a ``dns.rrset.RRset`` object. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - r = RRset(name, rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text( - r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec - ) - r.add(rd) - return r - - -def from_text( - name: Union[dns.name.Name, str], - ttl: int, - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - *text_rdatas: Any, -) -> RRset: - """Create an RRset with the specified name, TTL, class, and type and with - the specified rdatas in text format. - - Returns a ``dns.rrset.RRset`` object. - """ - - return from_text_list( - name, ttl, rdclass, rdtype, cast(Collection[str], text_rdatas) - ) - - -def from_rdata_list( - name: Union[dns.name.Name, str], - ttl: int, - rdatas: Collection[dns.rdata.Rdata], - idna_codec: Optional[dns.name.IDNACodec] = None, -) -> RRset: - """Create an RRset with the specified name and TTL, and with - the specified list of rdata objects. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - Returns a ``dns.rrset.RRset`` object. - - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = RRset(name, rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - assert r is not None - return r - - -def from_rdata(name: Union[dns.name.Name, str], ttl: int, *rdatas: Any) -> RRset: - """Create an RRset with the specified name and TTL, and with - the specified rdata objects. - - Returns a ``dns.rrset.RRset`` object. 
- """ - - return from_rdata_list(name, ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.12/site-packages/dns/serial.py b/venv/lib/python3.12/site-packages/dns/serial.py deleted file mode 100644 index 3417299b..00000000 --- a/venv/lib/python3.12/site-packages/dns/serial.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""Serial Number Arthimetic from RFC 1982""" - - -class Serial: - def __init__(self, value: int, bits: int = 32): - self.value = value % 2**bits - self.bits = bits - - def __repr__(self): - return f"dns.serial.Serial({self.value}, {self.bits})" - - def __eq__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - return self.value == other.value - - def __ne__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - return self.value != other.value - - def __lt__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - if self.value < other.value and other.value - self.value < 2 ** (self.bits - 1): - return True - elif self.value > other.value and self.value - other.value > 2 ** ( - self.bits - 1 - ): - return True - else: - return False - - def __le__(self, other): - return self == other or self < other - - def __gt__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - if self.value < other.value and other.value - self.value > 2 ** (self.bits - 1): - return True - elif self.value > other.value and self.value - other.value < 2 ** ( - self.bits - 1 - ): - return True - else: - return False - - def __ge__(self, other): - return self == other or self > other - - def __add__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v += delta - v = v % 2**self.bits - return Serial(v, self.bits) - - def __iadd__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v += delta - v = v % 2**self.bits - self.value = v - return self - - def __sub__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v -= delta - v = v % 2**self.bits - return Serial(v, self.bits) - - def __isub__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v -= delta - v = v % 2**self.bits - self.value = v - return self diff --git a/venv/lib/python3.12/site-packages/dns/set.py b/venv/lib/python3.12/site-packages/dns/set.py deleted file mode 100644 index ae8f0dd5..00000000 --- a/venv/lib/python3.12/site-packages/dns/set.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright (C) Dnspython Contributors, see 
LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import itertools - - -class Set: - """A simple set class. - - This class was originally used to deal with python not having a set class, and - originally the class used lists in its implementation. The ordered and indexable - nature of RRsets and Rdatasets is unfortunately widely used in dnspython - applications, so for backwards compatibility sets continue to be a custom class, now - based on an ordered dictionary. - """ - - __slots__ = ["items"] - - def __init__(self, items=None): - """Initialize the set. - - *items*, an iterable or ``None``, the initial set of items. - """ - - self.items = dict() - if items is not None: - for item in items: - # This is safe for how we use set, but if other code - # subclasses it could be a legitimate issue. - self.add(item) # lgtm[py/init-calls-subclass] - - def __repr__(self): - return f"dns.set.Set({repr(list(self.items.keys()))})" # pragma: no cover - - def add(self, item): - """Add an item to the set.""" - - if item not in self.items: - self.items[item] = None - - def remove(self, item): - """Remove an item from the set.""" - - try: - del self.items[item] - except KeyError: - raise ValueError - - def discard(self, item): - """Remove an item from the set if present.""" - - self.items.pop(item, None) - - def pop(self): - """Remove an arbitrary item from the set.""" - (k, _) = self.items.popitem() - return k - - def _clone(self) -> "Set": - """Make a (shallow) copy of the set. - - There is a 'clone protocol' that subclasses of this class - should use. To make a copy, first call your super's _clone() - method, and use the object returned as the new instance. Then - make shallow copies of the attributes defined in the subclass. - - This protocol allows us to write the set algorithms that - return new instances (e.g. union) once, and keep using them in - subclasses. - """ - - if hasattr(self, "_clone_class"): - cls = self._clone_class # type: ignore - else: - cls = self.__class__ - obj = cls.__new__(cls) - obj.items = dict() - obj.items.update(self.items) - return obj - - def __copy__(self): - """Make a (shallow) copy of the set.""" - - return self._clone() - - def copy(self): - """Make a (shallow) copy of the set.""" - - return self._clone() - - def union_update(self, other): - """Update the set, adding any elements from other which are not - already in the set. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - return - for item in other.items: - self.add(item) - - def intersection_update(self, other): - """Update the set, removing any elements from other which are not - in both sets. 
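The Serial class in the hunk above implements RFC 1982 wrap-around comparison; a small offline sketch:

    from dns.serial import Serial

    s = Serial(2**32 - 1)          # the largest 32-bit serial
    print((s + 1).value)           # 0 -- addition wraps modulo 2**32
    print(Serial(0) > s)           # True: 0 is "after" 2**32 - 1
    print(Serial(0) < Serial(5))   # True: ordinary small distances still work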
- """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - return - # we make a copy of the list so that we can remove items from - # the list without breaking the iterator. - for item in list(self.items): - if item not in other.items: - del self.items[item] - - def difference_update(self, other): - """Update the set, removing any elements from other which are in - the set. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - self.items.clear() - else: - for item in other.items: - self.discard(item) - - def symmetric_difference_update(self, other): - """Update the set, retaining only elements unique to both sets.""" - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - self.items.clear() - else: - overlap = self.intersection(other) - self.union_update(other) - self.difference_update(overlap) - - def union(self, other): - """Return a new set which is the union of ``self`` and ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.union_update(other) - return obj - - def intersection(self, other): - """Return a new set which is the intersection of ``self`` and - ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.intersection_update(other) - return obj - - def difference(self, other): - """Return a new set which ``self`` - ``other``, i.e. the items - in ``self`` which are not also in ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.difference_update(other) - return obj - - def symmetric_difference(self, other): - """Return a new set which (``self`` - ``other``) | (``other`` - - ``self), ie: the items in either ``self`` or ``other`` which - are not contained in their intersection. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.symmetric_difference_update(other) - return obj - - def __or__(self, other): - return self.union(other) - - def __and__(self, other): - return self.intersection(other) - - def __add__(self, other): - return self.union(other) - - def __sub__(self, other): - return self.difference(other) - - def __xor__(self, other): - return self.symmetric_difference(other) - - def __ior__(self, other): - self.union_update(other) - return self - - def __iand__(self, other): - self.intersection_update(other) - return self - - def __iadd__(self, other): - self.union_update(other) - return self - - def __isub__(self, other): - self.difference_update(other) - return self - - def __ixor__(self, other): - self.symmetric_difference_update(other) - return self - - def update(self, other): - """Update the set, adding any elements from other which are not - already in the set. - - *other*, the collection of items with which to update the set, which - may be any iterable type. 
- """ - - for item in other: - self.add(item) - - def clear(self): - """Make the set empty.""" - self.items.clear() - - def __eq__(self, other): - return self.items == other.items - - def __ne__(self, other): - return not self.__eq__(other) - - def __len__(self): - return len(self.items) - - def __iter__(self): - return iter(self.items) - - def __getitem__(self, i): - if isinstance(i, slice): - return list(itertools.islice(self.items, i.start, i.stop, i.step)) - else: - return next(itertools.islice(self.items, i, i + 1)) - - def __delitem__(self, i): - if isinstance(i, slice): - for elt in list(self[i]): - del self.items[elt] - else: - del self.items[self[i]] - - def issubset(self, other): - """Is this set a subset of *other*? - - Returns a ``bool``. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in self.items: - if item not in other.items: - return False - return True - - def issuperset(self, other): - """Is this set a superset of *other*? - - Returns a ``bool``. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in other.items: - if item not in self.items: - return False - return True - - def isdisjoint(self, other): - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in other.items: - if item in self.items: - return False - return True diff --git a/venv/lib/python3.12/site-packages/dns/tokenizer.py b/venv/lib/python3.12/site-packages/dns/tokenizer.py deleted file mode 100644 index ab205bc3..00000000 --- a/venv/lib/python3.12/site-packages/dns/tokenizer.py +++ /dev/null @@ -1,708 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Tokenize DNS zone file format""" - -import io -import sys -from typing import Any, List, Optional, Tuple - -import dns.exception -import dns.name -import dns.ttl - -_DELIMITERS = {" ", "\t", "\n", ";", "(", ")", '"'} -_QUOTING_DELIMITERS = {'"'} - -EOF = 0 -EOL = 1 -WHITESPACE = 2 -IDENTIFIER = 3 -QUOTED_STRING = 4 -COMMENT = 5 -DELIMITER = 6 - - -class UngetBufferFull(dns.exception.DNSException): - """An attempt was made to unget a token when the unget buffer was full.""" - - -class Token: - """A DNS zone file format token. - - ttype: The token type - value: The token value - has_escape: Does the token value contain escapes? 
- """ - - def __init__( - self, - ttype: int, - value: Any = "", - has_escape: bool = False, - comment: Optional[str] = None, - ): - """Initialize a token instance.""" - - self.ttype = ttype - self.value = value - self.has_escape = has_escape - self.comment = comment - - def is_eof(self) -> bool: - return self.ttype == EOF - - def is_eol(self) -> bool: - return self.ttype == EOL - - def is_whitespace(self) -> bool: - return self.ttype == WHITESPACE - - def is_identifier(self) -> bool: - return self.ttype == IDENTIFIER - - def is_quoted_string(self) -> bool: - return self.ttype == QUOTED_STRING - - def is_comment(self) -> bool: - return self.ttype == COMMENT - - def is_delimiter(self) -> bool: # pragma: no cover (we don't return delimiters yet) - return self.ttype == DELIMITER - - def is_eol_or_eof(self) -> bool: - return self.ttype == EOL or self.ttype == EOF - - def __eq__(self, other): - if not isinstance(other, Token): - return False - return self.ttype == other.ttype and self.value == other.value - - def __ne__(self, other): - if not isinstance(other, Token): - return True - return self.ttype != other.ttype or self.value != other.value - - def __str__(self): - return '%d "%s"' % (self.ttype, self.value) - - def unescape(self) -> "Token": - if not self.has_escape: - return self - unescaped = "" - l = len(self.value) - i = 0 - while i < l: - c = self.value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via get()) - raise dns.exception.UnexpectedEnd - c = self.value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = self.value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = self.value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - c = chr(codepoint) - unescaped += c - return Token(self.ttype, unescaped) - - def unescape_to_bytes(self) -> "Token": - # We used to use unescape() for TXT-like records, but this - # caused problems as we'd process DNS escapes into Unicode code - # points instead of byte values, and then a to_text() of the - # processed data would not equal the original input. For - # example, \226 in the TXT record would have a to_text() of - # \195\162 because we applied UTF-8 encoding to Unicode code - # point 226. - # - # We now apply escapes while converting directly to bytes, - # avoiding this double encoding. - # - # This code also handles cases where the unicode input has - # non-ASCII code-points in it by converting it to UTF-8. TXT - # records aren't defined for Unicode, but this is the best we - # can do to preserve meaning. 
For example, - # - # foo\u200bbar - # - # (where \u200b is Unicode code point 0x200b) will be treated - # as if the input had been the UTF-8 encoding of that string, - # namely: - # - # foo\226\128\139bar - # - unescaped = b"" - l = len(self.value) - i = 0 - while i < l: - c = self.value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via get()) - raise dns.exception.UnexpectedEnd - c = self.value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = self.value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = self.value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - unescaped += b"%c" % (codepoint) - else: - # Note that as mentioned above, if c is a Unicode - # code point outside of the ASCII range, then this - # += is converting that code point to its UTF-8 - # encoding and appending multiple bytes to - # unescaped. - unescaped += c.encode() - else: - unescaped += c.encode() - return Token(self.ttype, bytes(unescaped)) - - -class Tokenizer: - """A DNS zone file format tokenizer. - - A token object is basically a (type, value) tuple. The valid - types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, - COMMENT, and DELIMITER. - - file: The file to tokenize - - ungotten_char: The most recently ungotten character, or None. - - ungotten_token: The most recently ungotten token, or None. - - multiline: The current multiline level. This value is increased - by one every time a '(' delimiter is read, and decreased by one every time - a ')' delimiter is read. - - quoting: This variable is true if the tokenizer is currently - reading a quoted string. - - eof: This variable is true if the tokenizer has encountered EOF. - - delimiters: The current delimiter dictionary. - - line_number: The current line number - - filename: A filename that will be returned by the where() method. - - idna_codec: A dns.name.IDNACodec, specifies the IDNA - encoder/decoder. If None, the default IDNA 2003 - encoder/decoder is used. - """ - - def __init__( - self, - f: Any = sys.stdin, - filename: Optional[str] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, - ): - """Initialize a tokenizer instance. - - f: The file to tokenize. The default is sys.stdin. - This parameter may also be a string, in which case the tokenizer - will take its input from the contents of the string. - - filename: the name of the filename that the where() method - will return. - - idna_codec: A dns.name.IDNACodec, specifies the IDNA - encoder/decoder. If None, the default IDNA 2003 - encoder/decoder is used. 
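The double-encoding problem that the unescape_to_bytes() comment above describes can be reproduced in two lines of plain Python, independent of dnspython:

```python
# The zone-file escape \226 names the single byte 0xE2. Routing it
# through a Unicode code point first mangles it into two bytes:
print(chr(226).encode("utf-8"))  # b'\xc3\xa2', i.e. \195\162
# Converting directly to a byte value preserves the wire form:
print(bytes([226]))              # b'\xe2'
```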
- """ - - if isinstance(f, str): - f = io.StringIO(f) - if filename is None: - filename = "" - elif isinstance(f, bytes): - f = io.StringIO(f.decode()) - if filename is None: - filename = "" - else: - if filename is None: - if f is sys.stdin: - filename = "" - else: - filename = "" - self.file = f - self.ungotten_char: Optional[str] = None - self.ungotten_token: Optional[Token] = None - self.multiline = 0 - self.quoting = False - self.eof = False - self.delimiters = _DELIMITERS - self.line_number = 1 - assert filename is not None - self.filename = filename - if idna_codec is None: - self.idna_codec: dns.name.IDNACodec = dns.name.IDNA_2003 - else: - self.idna_codec = idna_codec - - def _get_char(self) -> str: - """Read a character from input.""" - - if self.ungotten_char is None: - if self.eof: - c = "" - else: - c = self.file.read(1) - if c == "": - self.eof = True - elif c == "\n": - self.line_number += 1 - else: - c = self.ungotten_char - self.ungotten_char = None - return c - - def where(self) -> Tuple[str, int]: - """Return the current location in the input. - - Returns a (string, int) tuple. The first item is the filename of - the input, the second is the current line number. - """ - - return (self.filename, self.line_number) - - def _unget_char(self, c: str) -> None: - """Unget a character. - - The unget buffer for characters is only one character large; it is - an error to try to unget a character when the unget buffer is not - empty. - - c: the character to unget - raises UngetBufferFull: there is already an ungotten char - """ - - if self.ungotten_char is not None: - # this should never happen! - raise UngetBufferFull # pragma: no cover - self.ungotten_char = c - - def skip_whitespace(self) -> int: - """Consume input until a non-whitespace character is encountered. - - The non-whitespace character is then ungotten, and the number of - whitespace characters consumed is returned. - - If the tokenizer is in multiline mode, then newlines are whitespace. - - Returns the number of characters skipped. - """ - - skipped = 0 - while True: - c = self._get_char() - if c != " " and c != "\t": - if (c != "\n") or not self.multiline: - self._unget_char(c) - return skipped - skipped += 1 - - def get(self, want_leading: bool = False, want_comment: bool = False) -> Token: - """Get the next token. - - want_leading: If True, return a WHITESPACE token if the - first character read is whitespace. The default is False. - - want_comment: If True, return a COMMENT token if the - first token read is a comment. The default is False. - - Raises dns.exception.UnexpectedEnd: input ended prematurely - - Raises dns.exception.SyntaxError: input was badly formed - - Returns a Token. 
- """ - - if self.ungotten_token is not None: - utoken = self.ungotten_token - self.ungotten_token = None - if utoken.is_whitespace(): - if want_leading: - return utoken - elif utoken.is_comment(): - if want_comment: - return utoken - else: - return utoken - skipped = self.skip_whitespace() - if want_leading and skipped > 0: - return Token(WHITESPACE, " ") - token = "" - ttype = IDENTIFIER - has_escape = False - while True: - c = self._get_char() - if c == "" or c in self.delimiters: - if c == "" and self.quoting: - raise dns.exception.UnexpectedEnd - if token == "" and ttype != QUOTED_STRING: - if c == "(": - self.multiline += 1 - self.skip_whitespace() - continue - elif c == ")": - if self.multiline <= 0: - raise dns.exception.SyntaxError - self.multiline -= 1 - self.skip_whitespace() - continue - elif c == '"': - if not self.quoting: - self.quoting = True - self.delimiters = _QUOTING_DELIMITERS - ttype = QUOTED_STRING - continue - else: - self.quoting = False - self.delimiters = _DELIMITERS - self.skip_whitespace() - continue - elif c == "\n": - return Token(EOL, "\n") - elif c == ";": - while 1: - c = self._get_char() - if c == "\n" or c == "": - break - token += c - if want_comment: - self._unget_char(c) - return Token(COMMENT, token) - elif c == "": - if self.multiline: - raise dns.exception.SyntaxError( - "unbalanced parentheses" - ) - return Token(EOF, comment=token) - elif self.multiline: - self.skip_whitespace() - token = "" - continue - else: - return Token(EOL, "\n", comment=token) - else: - # This code exists in case we ever want a - # delimiter to be returned. It never produces - # a token currently. - token = c - ttype = DELIMITER - else: - self._unget_char(c) - break - elif self.quoting and c == "\n": - raise dns.exception.SyntaxError("newline in quoted string") - elif c == "\\": - # - # It's an escape. Put it and the next character into - # the token; it will be checked later for goodness. - # - token += c - has_escape = True - c = self._get_char() - if c == "" or (c == "\n" and not self.quoting): - raise dns.exception.UnexpectedEnd - token += c - if token == "" and ttype != QUOTED_STRING: - if self.multiline: - raise dns.exception.SyntaxError("unbalanced parentheses") - ttype = EOF - return Token(ttype, token, has_escape) - - def unget(self, token: Token) -> None: - """Unget a token. - - The unget buffer for tokens is only one token large; it is - an error to try to unget a token when the unget buffer is not - empty. - - token: the token to unget - - Raises UngetBufferFull: there is already an ungotten token - """ - - if self.ungotten_token is not None: - raise UngetBufferFull - self.ungotten_token = token - - def next(self): - """Return the next item in an iteration. - - Returns a Token. - """ - - token = self.get() - if token.is_eof(): - raise StopIteration - return token - - __next__ = next - - def __iter__(self): - return self - - # Helpers - - def get_int(self, base: int = 10) -> int: - """Read the next token and interpret it as an unsigned integer. - - Raises dns.exception.SyntaxError if not an unsigned integer. - - Returns an int. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - if not token.value.isdigit(): - raise dns.exception.SyntaxError("expecting an integer") - return int(token.value, base) - - def get_uint8(self) -> int: - """Read the next token and interpret it as an 8-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. 
- - Returns an int. - """ - - value = self.get_int() - if value < 0 or value > 255: - raise dns.exception.SyntaxError( - "%d is not an unsigned 8-bit integer" % value - ) - return value - - def get_uint16(self, base: int = 10) -> int: - """Read the next token and interpret it as a 16-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 65535: - if base == 8: - raise dns.exception.SyntaxError( - f"{value:o} is not an octal unsigned 16-bit integer" - ) - else: - raise dns.exception.SyntaxError( - "%d is not an unsigned 16-bit integer" % value - ) - return value - - def get_uint32(self, base: int = 10) -> int: - """Read the next token and interpret it as a 32-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 4294967295: - raise dns.exception.SyntaxError( - "%d is not an unsigned 32-bit integer" % value - ) - return value - - def get_uint48(self, base: int = 10) -> int: - """Read the next token and interpret it as a 48-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 281474976710655: - raise dns.exception.SyntaxError( - "%d is not an unsigned 48-bit integer" % value - ) - return value - - def get_string(self, max_length: Optional[int] = None) -> str: - """Read the next token and interpret it as a string. - - Raises dns.exception.SyntaxError if not a string. - Raises dns.exception.SyntaxError if token value length - exceeds max_length (if specified). - - Returns a string. - """ - - token = self.get().unescape() - if not (token.is_identifier() or token.is_quoted_string()): - raise dns.exception.SyntaxError("expecting a string") - if max_length and len(token.value) > max_length: - raise dns.exception.SyntaxError("string too long") - return token.value - - def get_identifier(self) -> str: - """Read the next token, which should be an identifier. - - Raises dns.exception.SyntaxError if not an identifier. - - Returns a string. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - return token.value - - def get_remaining(self, max_tokens: Optional[int] = None) -> List[Token]: - """Return the remaining tokens on the line, until an EOL or EOF is seen. - - max_tokens: If not None, stop after this number of tokens. - - Returns a list of tokens. - """ - - tokens = [] - while True: - token = self.get() - if token.is_eol_or_eof(): - self.unget(token) - break - tokens.append(token) - if len(tokens) == max_tokens: - break - return tokens - - def concatenate_remaining_identifiers(self, allow_empty: bool = False) -> str: - """Read the remaining tokens on the line, which should be identifiers. - - Raises dns.exception.SyntaxError if there are no remaining tokens, - unless `allow_empty=True` is given. - - Raises dns.exception.SyntaxError if a token is seen that is not an - identifier. - - Returns a string containing a concatenation of the remaining - identifiers. 
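The typed helpers above all layer range checks on get_int(); a short sketch (values illustrative):

```python
import dns.exception
from dns.tokenizer import Tokenizer

tok = Tokenizer("3600 example.com. 99999\n")
print(tok.get_uint16())      # 3600
print(tok.get_identifier())  # 'example.com.'
try:
    tok.get_uint16()         # 99999 does not fit in 16 bits
except dns.exception.SyntaxError as e:
    print(e)                 # 99999 is not an unsigned 16-bit integer
```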
- """ - s = "" - while True: - token = self.get().unescape() - if token.is_eol_or_eof(): - self.unget(token) - break - if not token.is_identifier(): - raise dns.exception.SyntaxError - s += token.value - if not (allow_empty or s): - raise dns.exception.SyntaxError("expecting another identifier") - return s - - def as_name( - self, - token: Token, - origin: Optional[dns.name.Name] = None, - relativize: bool = False, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.name.Name: - """Try to interpret the token as a DNS name. - - Raises dns.exception.SyntaxError if not a name. - - Returns a dns.name.Name. - """ - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - name = dns.name.from_text(token.value, origin, self.idna_codec) - return name.choose_relativity(relativize_to or origin, relativize) - - def get_name( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = False, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.name.Name: - """Read the next token and interpret it as a DNS name. - - Raises dns.exception.SyntaxError if not a name. - - Returns a dns.name.Name. - """ - - token = self.get() - return self.as_name(token, origin, relativize, relativize_to) - - def get_eol_as_token(self) -> Token: - """Read the next token and raise an exception if it isn't EOL or - EOF. - - Returns a string. - """ - - token = self.get() - if not token.is_eol_or_eof(): - raise dns.exception.SyntaxError( - 'expected EOL or EOF, got %d "%s"' % (token.ttype, token.value) - ) - return token - - def get_eol(self) -> str: - return self.get_eol_as_token().value - - def get_ttl(self) -> int: - """Read the next token and interpret it as a DNS TTL. - - Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an - identifier or badly formed. - - Returns an int. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - return dns.ttl.from_text(token.value) diff --git a/venv/lib/python3.12/site-packages/dns/transaction.py b/venv/lib/python3.12/site-packages/dns/transaction.py deleted file mode 100644 index aa2e1160..00000000 --- a/venv/lib/python3.12/site-packages/dns/transaction.py +++ /dev/null @@ -1,649 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import collections -from typing import Any, Callable, Iterator, List, Optional, Tuple, Union - -import dns.exception -import dns.name -import dns.node -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rrset -import dns.serial -import dns.ttl - - -class TransactionManager: - def reader(self) -> "Transaction": - """Begin a read-only transaction.""" - raise NotImplementedError # pragma: no cover - - def writer(self, replacement: bool = False) -> "Transaction": - """Begin a writable transaction. - - *replacement*, a ``bool``. If `True`, the content of the - transaction completely replaces any prior content. If False, - the default, then the content of the transaction updates the - existing content. - """ - raise NotImplementedError # pragma: no cover - - def origin_information( - self, - ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: - """Returns a tuple - - (absolute_origin, relativize, effective_origin) - - giving the absolute name of the default origin for any - relative domain names, the "effective origin", and whether - names should be relativized. 
The "effective origin" is the - absolute origin if relativize is False, and the empty name if - relativize is true. (The effective origin is provided even - though it can be computed from the absolute_origin and - relativize setting because it avoids a lot of code - duplication.) - - If the returned names are `None`, then no origin information is - available. - - This information is used by code working with transactions to - allow it to coordinate relativization. The transaction code - itself takes what it gets (i.e. does not change name - relativity). - - """ - raise NotImplementedError # pragma: no cover - - def get_class(self) -> dns.rdataclass.RdataClass: - """The class of the transaction manager.""" - raise NotImplementedError # pragma: no cover - - def from_wire_origin(self) -> Optional[dns.name.Name]: - """Origin to use in from_wire() calls.""" - (absolute_origin, relativize, _) = self.origin_information() - if relativize: - return absolute_origin - else: - return None - - -class DeleteNotExact(dns.exception.DNSException): - """Existing data did not match data specified by an exact delete.""" - - -class ReadOnly(dns.exception.DNSException): - """Tried to write to a read-only transaction.""" - - -class AlreadyEnded(dns.exception.DNSException): - """Tried to use an already-ended transaction.""" - - -def _ensure_immutable_rdataset(rdataset): - if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): - return rdataset - return dns.rdataset.ImmutableRdataset(rdataset) - - -def _ensure_immutable_node(node): - if node is None or node.is_immutable(): - return node - return dns.node.ImmutableNode(node) - - -CheckPutRdatasetType = Callable[ - ["Transaction", dns.name.Name, dns.rdataset.Rdataset], None -] -CheckDeleteRdatasetType = Callable[ - ["Transaction", dns.name.Name, dns.rdatatype.RdataType, dns.rdatatype.RdataType], - None, -] -CheckDeleteNameType = Callable[["Transaction", dns.name.Name], None] - - -class Transaction: - def __init__( - self, - manager: TransactionManager, - replacement: bool = False, - read_only: bool = False, - ): - self.manager = manager - self.replacement = replacement - self.read_only = read_only - self._ended = False - self._check_put_rdataset: List[CheckPutRdatasetType] = [] - self._check_delete_rdataset: List[CheckDeleteRdatasetType] = [] - self._check_delete_name: List[CheckDeleteNameType] = [] - - # - # This is the high level API - # - # Note that we currently use non-immutable types in the return type signature to - # avoid covariance problems, e.g. if the caller has a List[Rdataset], mypy will be - # unhappy if we return an ImmutableRdataset. - - def get( - self, - name: Optional[Union[dns.name.Name, str]], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> dns.rdataset.Rdataset: - """Return the rdataset associated with *name*, *rdtype*, and *covers*, - or `None` if not found. - - Note that the returned rdataset is immutable. - """ - self._check_ended() - if isinstance(name, str): - name = dns.name.from_text(name, None) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - rdataset = self._get_rdataset(name, rdtype, covers) - return _ensure_immutable_rdataset(rdataset) - - def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: - """Return the node at *name*, if any. - - Returns an immutable node or ``None``. 
- """ - return _ensure_immutable_node(self._get_node(name)) - - def _check_read_only(self) -> None: - if self.read_only: - raise ReadOnly - - def add(self, *args: Any) -> None: - """Add records. - - The arguments may be: - - - rrset - - - name, rdataset... - - - name, ttl, rdata... - """ - self._check_ended() - self._check_read_only() - self._add(False, args) - - def replace(self, *args: Any) -> None: - """Replace the existing rdataset at the name with the specified - rdataset, or add the specified rdataset if there was no existing - rdataset. - - The arguments may be: - - - rrset - - - name, rdataset... - - - name, ttl, rdata... - - Note that if you want to replace the entire node, you should do - a delete of the name followed by one or more calls to add() or - replace(). - """ - self._check_ended() - self._check_read_only() - self._add(True, args) - - def delete(self, *args: Any) -> None: - """Delete records. - - It is not an error if some of the records are not in the existing - set. - - The arguments may be: - - - rrset - - - name - - - name, rdatatype, [covers] - - - name, rdataset... - - - name, rdata... - """ - self._check_ended() - self._check_read_only() - self._delete(False, args) - - def delete_exact(self, *args: Any) -> None: - """Delete records. - - The arguments may be: - - - rrset - - - name - - - name, rdatatype, [covers] - - - name, rdataset... - - - name, rdata... - - Raises dns.transaction.DeleteNotExact if some of the records - are not in the existing set. - - """ - self._check_ended() - self._check_read_only() - self._delete(True, args) - - def name_exists(self, name: Union[dns.name.Name, str]) -> bool: - """Does the specified name exist?""" - self._check_ended() - if isinstance(name, str): - name = dns.name.from_text(name, None) - return self._name_exists(name) - - def update_serial( - self, - value: int = 1, - relative: bool = True, - name: dns.name.Name = dns.name.empty, - ) -> None: - """Update the serial number. - - *value*, an `int`, is an increment if *relative* is `True`, or the - actual value to set if *relative* is `False`. - - Raises `KeyError` if there is no SOA rdataset at *name*. - - Raises `ValueError` if *value* is negative or if the increment is - so large that it would cause the new serial to be less than the - prior value. - """ - self._check_ended() - if value < 0: - raise ValueError("negative update_serial() value") - if isinstance(name, str): - name = dns.name.from_text(name, None) - rdataset = self._get_rdataset(name, dns.rdatatype.SOA, dns.rdatatype.NONE) - if rdataset is None or len(rdataset) == 0: - raise KeyError - if relative: - serial = dns.serial.Serial(rdataset[0].serial) + value - else: - serial = dns.serial.Serial(value) - serial = serial.value # convert back to int - if serial == 0: - serial = 1 - rdata = rdataset[0].replace(serial=serial) - new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata) - self.replace(name, new_rdataset) - - def __iter__(self): - self._check_ended() - return self._iterate_rdatasets() - - def changed(self) -> bool: - """Has this transaction changed anything? - - For read-only transactions, the result is always `False`. - - For writable transactions, the result is `True` if at some time - during the life of the transaction, the content was changed. - """ - self._check_ended() - return self._changed() - - def commit(self) -> None: - """Commit the transaction. - - Normally transactions are used as context managers and commit - or rollback automatically, but it may be done explicitly if needed. 
- A ``dns.transaction.Ended`` exception will be raised if you try - to use a transaction after it has been committed or rolled back. - - Raises an exception if the commit fails (in which case the transaction - is also rolled back. - """ - self._end(True) - - def rollback(self) -> None: - """Rollback the transaction. - - Normally transactions are used as context managers and commit - or rollback automatically, but it may be done explicitly if needed. - A ``dns.transaction.AlreadyEnded`` exception will be raised if you try - to use a transaction after it has been committed or rolled back. - - Rollback cannot otherwise fail. - """ - self._end(False) - - def check_put_rdataset(self, check: CheckPutRdatasetType) -> None: - """Call *check* before putting (storing) an rdataset. - - The function is called with the transaction, the name, and the rdataset. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the put, and otherwise should return ``None``. - """ - self._check_put_rdataset.append(check) - - def check_delete_rdataset(self, check: CheckDeleteRdatasetType) -> None: - """Call *check* before deleting an rdataset. - - The function is called with the transaction, the name, the rdatatype, - and the covered rdatatype. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the put, and otherwise should return ``None``. - """ - self._check_delete_rdataset.append(check) - - def check_delete_name(self, check: CheckDeleteNameType) -> None: - """Call *check* before putting (storing) an rdataset. - - The function is called with the transaction and the name. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the put, and otherwise should return ``None``. - """ - self._check_delete_name.append(check) - - def iterate_rdatasets( - self, - ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]: - """Iterate all the rdatasets in the transaction, returning - (`dns.name.Name`, `dns.rdataset.Rdataset`) tuples. - - Note that as is usual with python iterators, adding or removing items - while iterating will invalidate the iterator and may raise `RuntimeError` - or fail to iterate over all entries.""" - self._check_ended() - return self._iterate_rdatasets() - - def iterate_names(self) -> Iterator[dns.name.Name]: - """Iterate all the names in the transaction. 
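The check_* hooks above make write-time policy enforcement possible; a sketch reusing the zone from the previous example (the TTL ceiling is made up):

```python
def forbid_large_ttls(txn, name, rdataset):
    # check_put_rdataset hook: raising rejects the pending write
    if rdataset.ttl > 86400:
        raise ValueError(f"TTL {rdataset.ttl} too large for {name}")

with zone.writer() as txn:
    txn.check_put_rdataset(forbid_large_ttls)
    txn.add("mail", 172800, dns.rdata.from_text("IN", "A", "192.0.2.2"))
    # raises ValueError; the context manager then rolls the write back
```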
- - Note that as is usual with python iterators, adding or removing names - while iterating will invalidate the iterator and may raise `RuntimeError` - or fail to iterate over all entries.""" - self._check_ended() - return self._iterate_names() - - # - # Helper methods - # - - def _raise_if_not_empty(self, method, args): - if len(args) != 0: - raise TypeError(f"extra parameters to {method}") - - def _rdataset_from_args(self, method, deleting, args): - try: - arg = args.popleft() - if isinstance(arg, dns.rrset.RRset): - rdataset = arg.to_rdataset() - elif isinstance(arg, dns.rdataset.Rdataset): - rdataset = arg - else: - if deleting: - ttl = 0 - else: - if isinstance(arg, int): - ttl = arg - if ttl > dns.ttl.MAX_TTL: - raise ValueError(f"{method}: TTL value too big") - else: - raise TypeError(f"{method}: expected a TTL") - arg = args.popleft() - if isinstance(arg, dns.rdata.Rdata): - rdataset = dns.rdataset.from_rdata(ttl, arg) - else: - raise TypeError(f"{method}: expected an Rdata") - return rdataset - except IndexError: - if deleting: - return None - else: - # reraise - raise TypeError(f"{method}: expected more arguments") - - def _add(self, replace, args): - try: - args = collections.deque(args) - if replace: - method = "replace()" - else: - method = "add()" - arg = args.popleft() - if isinstance(arg, str): - arg = dns.name.from_text(arg, None) - if isinstance(arg, dns.name.Name): - name = arg - rdataset = self._rdataset_from_args(method, False, args) - elif isinstance(arg, dns.rrset.RRset): - rrset = arg - name = rrset.name - # rrsets are also rdatasets, but they don't print the - # same and can't be stored in nodes, so convert. - rdataset = rrset.to_rdataset() - else: - raise TypeError( - f"{method} requires a name or RRset as the first argument" - ) - if rdataset.rdclass != self.manager.get_class(): - raise ValueError(f"{method} has objects of wrong RdataClass") - if rdataset.rdtype == dns.rdatatype.SOA: - (_, _, origin) = self._origin_information() - if name != origin: - raise ValueError(f"{method} has non-origin SOA") - self._raise_if_not_empty(method, args) - if not replace: - existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) - if existing is not None: - if isinstance(existing, dns.rdataset.ImmutableRdataset): - trds = dns.rdataset.Rdataset( - existing.rdclass, existing.rdtype, existing.covers - ) - trds.update(existing) - existing = trds - rdataset = existing.union(rdataset) - self._checked_put_rdataset(name, rdataset) - except IndexError: - raise TypeError(f"not enough parameters to {method}") - - def _delete(self, exact, args): - try: - args = collections.deque(args) - if exact: - method = "delete_exact()" - else: - method = "delete()" - arg = args.popleft() - if isinstance(arg, str): - arg = dns.name.from_text(arg, None) - if isinstance(arg, dns.name.Name): - name = arg - if len(args) > 0 and ( - isinstance(args[0], int) or isinstance(args[0], str) - ): - # deleting by type and (optionally) covers - rdtype = dns.rdatatype.RdataType.make(args.popleft()) - if len(args) > 0: - covers = dns.rdatatype.RdataType.make(args.popleft()) - else: - covers = dns.rdatatype.NONE - self._raise_if_not_empty(method, args) - existing = self._get_rdataset(name, rdtype, covers) - if existing is None: - if exact: - raise DeleteNotExact(f"{method}: missing rdataset") - else: - self._checked_delete_rdataset(name, rdtype, covers) - return - else: - rdataset = self._rdataset_from_args(method, True, args) - elif isinstance(arg, dns.rrset.RRset): - rdataset = arg # rrsets are also 
rdatasets - name = rdataset.name - else: - raise TypeError( - f"{method} requires a name or RRset as the first argument" - ) - self._raise_if_not_empty(method, args) - if rdataset: - if rdataset.rdclass != self.manager.get_class(): - raise ValueError(f"{method} has objects of wrong RdataClass") - existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) - if existing is not None: - if exact: - intersection = existing.intersection(rdataset) - if intersection != rdataset: - raise DeleteNotExact(f"{method}: missing rdatas") - rdataset = existing.difference(rdataset) - if len(rdataset) == 0: - self._checked_delete_rdataset( - name, rdataset.rdtype, rdataset.covers - ) - else: - self._checked_put_rdataset(name, rdataset) - elif exact: - raise DeleteNotExact(f"{method}: missing rdataset") - else: - if exact and not self._name_exists(name): - raise DeleteNotExact(f"{method}: name not known") - self._checked_delete_name(name) - except IndexError: - raise TypeError(f"not enough parameters to {method}") - - def _check_ended(self): - if self._ended: - raise AlreadyEnded - - def _end(self, commit): - self._check_ended() - try: - self._end_transaction(commit) - finally: - self._ended = True - - def _checked_put_rdataset(self, name, rdataset): - for check in self._check_put_rdataset: - check(self, name, rdataset) - self._put_rdataset(name, rdataset) - - def _checked_delete_rdataset(self, name, rdtype, covers): - for check in self._check_delete_rdataset: - check(self, name, rdtype, covers) - self._delete_rdataset(name, rdtype, covers) - - def _checked_delete_name(self, name): - for check in self._check_delete_name: - check(self, name) - self._delete_name(name) - - # - # Transactions are context managers. - # - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if not self._ended: - if exc_type is None: - self.commit() - else: - self.rollback() - return False - - # - # This is the low level API, which must be implemented by subclasses - # of Transaction. - # - - def _get_rdataset(self, name, rdtype, covers): - """Return the rdataset associated with *name*, *rdtype*, and *covers*, - or `None` if not found. - """ - raise NotImplementedError # pragma: no cover - - def _put_rdataset(self, name, rdataset): - """Store the rdataset.""" - raise NotImplementedError # pragma: no cover - - def _delete_name(self, name): - """Delete all data associated with *name*. - - It is not an error if the name does not exist. - """ - raise NotImplementedError # pragma: no cover - - def _delete_rdataset(self, name, rdtype, covers): - """Delete all data associated with *name*, *rdtype*, and *covers*. - - It is not an error if the rdataset does not exist. - """ - raise NotImplementedError # pragma: no cover - - def _name_exists(self, name): - """Does name exist? - - Returns a bool. - """ - raise NotImplementedError # pragma: no cover - - def _changed(self): - """Has this transaction changed anything?""" - raise NotImplementedError # pragma: no cover - - def _end_transaction(self, commit): - """End the transaction. - - *commit*, a bool. If ``True``, commit the transaction, otherwise - roll it back. - - If committing and the commit fails, then roll back and raise an - exception. - """ - raise NotImplementedError # pragma: no cover - - def _set_origin(self, origin): - """Set the origin. - - This method is called when reading a possibly relativized - source, and an origin setting operation occurs (e.g. $ORIGIN - in a zone file). 
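The low-level methods above are the whole contract a storage backend must satisfy; a toy dict-backed manager/transaction pair sketching the shape of such a subclass (no isolation or real rollback, purely illustrative):

```python
import dns.rdataclass
import dns.transaction


class DictManager(dns.transaction.TransactionManager):
    def __init__(self):
        self.data = {}  # (name, rdtype, covers) -> rdataset

    def reader(self):
        return DictTransaction(self, read_only=True)

    def writer(self, replacement=False):
        return DictTransaction(self, replacement)

    def origin_information(self):
        return (None, False, None)  # no origin; names are kept as given

    def get_class(self):
        return dns.rdataclass.IN


class DictTransaction(dns.transaction.Transaction):
    def _get_rdataset(self, name, rdtype, covers):
        return self.manager.data.get((name, rdtype, covers))

    def _put_rdataset(self, name, rdataset):
        self.manager.data[(name, rdataset.rdtype, rdataset.covers)] = rdataset

    def _delete_rdataset(self, name, rdtype, covers):
        self.manager.data.pop((name, rdtype, covers), None)

    def _delete_name(self, name):
        for key in [k for k in self.manager.data if k[0] == name]:
            del self.manager.data[key]

    def _name_exists(self, name):
        return any(k[0] == name for k in self.manager.data)

    def _changed(self):
        return True  # a real backend would track this honestly

    def _end_transaction(self, commit):
        pass  # writes go straight to the shared dict in this sketch

    def _iterate_rdatasets(self):
        return ((k[0], v) for k, v in self.manager.data.items())

    def _iterate_names(self):
        return iter({k[0] for k in self.manager.data})
```

With those pieces in place the base class supplies everything else: adds from DictManager().writer() flow through _checked_put_rdataset() into the dict, and reader() hands back immutable views.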
- """ - raise NotImplementedError # pragma: no cover - - def _iterate_rdatasets(self): - """Return an iterator that yields (name, rdataset) tuples.""" - raise NotImplementedError # pragma: no cover - - def _iterate_names(self): - """Return an iterator that yields a name.""" - raise NotImplementedError # pragma: no cover - - def _get_node(self, name): - """Return the node at *name*, if any. - - Returns a node or ``None``. - """ - raise NotImplementedError # pragma: no cover - - # - # Low-level API with a default implementation, in case a subclass needs - # to override. - # - - def _origin_information(self): - # This is only used by _add() - return self.manager.origin_information() diff --git a/venv/lib/python3.12/site-packages/dns/tsig.py b/venv/lib/python3.12/site-packages/dns/tsig.py deleted file mode 100644 index 780852e8..00000000 --- a/venv/lib/python3.12/site-packages/dns/tsig.py +++ /dev/null @@ -1,352 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS TSIG support.""" - -import base64 -import hashlib -import hmac -import struct - -import dns.exception -import dns.name -import dns.rcode -import dns.rdataclass - - -class BadTime(dns.exception.DNSException): - """The current time is not within the TSIG's validity time.""" - - -class BadSignature(dns.exception.DNSException): - """The TSIG signature fails to verify.""" - - -class BadKey(dns.exception.DNSException): - """The TSIG record owner name does not match the key.""" - - -class BadAlgorithm(dns.exception.DNSException): - """The TSIG algorithm does not match the key.""" - - -class PeerError(dns.exception.DNSException): - """Base class for all TSIG errors generated by the remote peer""" - - -class PeerBadKey(PeerError): - """The peer didn't know the key we used""" - - -class PeerBadSignature(PeerError): - """The peer didn't like the signature we sent""" - - -class PeerBadTime(PeerError): - """The peer didn't like the time we sent""" - - -class PeerBadTruncation(PeerError): - """The peer didn't like amount of truncation in the TSIG we sent""" - - -# TSIG Algorithms - -HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") -HMAC_SHA1 = dns.name.from_text("hmac-sha1") -HMAC_SHA224 = dns.name.from_text("hmac-sha224") -HMAC_SHA256 = dns.name.from_text("hmac-sha256") -HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") -HMAC_SHA384 = dns.name.from_text("hmac-sha384") -HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") -HMAC_SHA512 = dns.name.from_text("hmac-sha512") -HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") -GSS_TSIG = dns.name.from_text("gss-tsig") - -default_algorithm = HMAC_SHA256 - -mac_sizes = { - HMAC_SHA1: 20, - HMAC_SHA224: 28, - HMAC_SHA256: 32, - HMAC_SHA256_128: 16, - HMAC_SHA384: 48, - HMAC_SHA384_192: 24, - HMAC_SHA512: 64, - HMAC_SHA512_256: 32, - HMAC_MD5: 16, - GSS_TSIG: 128, # This is what we assume to be the worst case! -} - - -class GSSTSig: - """ - GSS-TSIG TSIG implementation. This uses the GSS-API context established - in the TKEY message handshake to sign messages using GSS-API message - integrity codes, per the RFC. - - In order to avoid a direct GSSAPI dependency, the keyring holds a ref - to the GSSAPI object required, rather than the key itself. 
- """ - - def __init__(self, gssapi_context): - self.gssapi_context = gssapi_context - self.data = b"" - self.name = "gss-tsig" - - def update(self, data): - self.data += data - - def sign(self): - # defer to the GSSAPI function to sign - return self.gssapi_context.get_signature(self.data) - - def verify(self, expected): - try: - # defer to the GSSAPI function to verify - return self.gssapi_context.verify_signature(self.data, expected) - except Exception: - # note the usage of a bare exception - raise BadSignature - - -class GSSTSigAdapter: - def __init__(self, keyring): - self.keyring = keyring - - def __call__(self, message, keyname): - if keyname in self.keyring: - key = self.keyring[keyname] - if isinstance(key, Key) and key.algorithm == GSS_TSIG: - if message: - GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) - return key - else: - return None - - @classmethod - def parse_tkey_and_step(cls, key, message, keyname): - # if the message is a TKEY type, absorb the key material - # into the context using step(); this is used to allow the - # client to complete the GSSAPI negotiation before attempting - # to verify the signed response to a TKEY message exchange - try: - rrset = message.find_rrset( - message.answer, keyname, dns.rdataclass.ANY, dns.rdatatype.TKEY - ) - if rrset: - token = rrset[0].key - gssapi_context = key.secret - return gssapi_context.step(token) - except KeyError: - pass - - -class HMACTSig: - """ - HMAC TSIG implementation. This uses the HMAC python module to handle the - sign/verify operations. - """ - - _hashes = { - HMAC_SHA1: hashlib.sha1, - HMAC_SHA224: hashlib.sha224, - HMAC_SHA256: hashlib.sha256, - HMAC_SHA256_128: (hashlib.sha256, 128), - HMAC_SHA384: hashlib.sha384, - HMAC_SHA384_192: (hashlib.sha384, 192), - HMAC_SHA512: hashlib.sha512, - HMAC_SHA512_256: (hashlib.sha512, 256), - HMAC_MD5: hashlib.md5, - } - - def __init__(self, key, algorithm): - try: - hashinfo = self._hashes[algorithm] - except KeyError: - raise NotImplementedError(f"TSIG algorithm {algorithm} is not supported") - - # create the HMAC context - if isinstance(hashinfo, tuple): - self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) - self.size = hashinfo[1] - else: - self.hmac_context = hmac.new(key, digestmod=hashinfo) - self.size = None - self.name = self.hmac_context.name - if self.size: - self.name += f"-{self.size}" - - def update(self, data): - return self.hmac_context.update(data) - - def sign(self): - # defer to the HMAC digest() function for that digestmod - digest = self.hmac_context.digest() - if self.size: - digest = digest[: (self.size // 8)] - return digest - - def verify(self, expected): - # re-digest and compare the results - mac = self.sign() - if not hmac.compare_digest(mac, expected): - raise BadSignature - - -def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=None): - """Return a context containing the TSIG rdata for the input parameters - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object - @raises ValueError: I{other_data} is too long - @raises NotImplementedError: I{algorithm} is not supported - """ - - first = not (ctx and multi) - if first: - ctx = get_context(key) - if request_mac: - ctx.update(struct.pack("!H", len(request_mac))) - ctx.update(request_mac) - ctx.update(struct.pack("!H", rdata.original_id)) - ctx.update(wire[2:]) - if first: - ctx.update(key.name.to_digestable()) - ctx.update(struct.pack("!H", dns.rdataclass.ANY)) - ctx.update(struct.pack("!I", 0)) - if time is None: - time = rdata.time_signed - upper_time = 
(time >> 32) & 0xFFFF - lower_time = time & 0xFFFFFFFF - time_encoded = struct.pack("!HIH", upper_time, lower_time, rdata.fudge) - other_len = len(rdata.other) - if other_len > 65535: - raise ValueError("TSIG Other Data is > 65535 bytes") - if first: - ctx.update(key.algorithm.to_digestable() + time_encoded) - ctx.update(struct.pack("!HH", rdata.error, other_len) + rdata.other) - else: - ctx.update(time_encoded) - return ctx - - -def _maybe_start_digest(key, mac, multi): - """If this is the first message in a multi-message sequence, - start a new context. - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object - """ - if multi: - ctx = get_context(key) - ctx.update(struct.pack("!H", len(mac))) - ctx.update(mac) - return ctx - else: - return None - - -def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False): - """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata - for the input parameters, the HMAC MAC calculated by applying the - TSIG signature algorithm, and the TSIG digest context. - @rtype: (string, dns.tsig.HMACTSig or dns.tsig.GSSTSig object) - @raises ValueError: I{other_data} is too long - @raises NotImplementedError: I{algorithm} is not supported - """ - - ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi) - mac = ctx.sign() - tsig = rdata.replace(time_signed=time, mac=mac) - - return (tsig, _maybe_start_digest(key, mac, multi)) - - -def validate( - wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, multi=False -): - """Validate the specified TSIG rdata against the other input parameters. - - @raises FormError: The TSIG is badly formed. - @raises BadTime: There is too much time skew between the client and the - server. - @raises BadSignature: The TSIG signature did not validate - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object""" - - (adcount,) = struct.unpack("!H", wire[10:12]) - if adcount == 0: - raise dns.exception.FormError - adcount -= 1 - new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] - if rdata.error != 0: - if rdata.error == dns.rcode.BADSIG: - raise PeerBadSignature - elif rdata.error == dns.rcode.BADKEY: - raise PeerBadKey - elif rdata.error == dns.rcode.BADTIME: - raise PeerBadTime - elif rdata.error == dns.rcode.BADTRUNC: - raise PeerBadTruncation - else: - raise PeerError("unknown TSIG error code %d" % rdata.error) - if abs(rdata.time_signed - now) > rdata.fudge: - raise BadTime - if key.name != owner: - raise BadKey - if key.algorithm != rdata.algorithm: - raise BadAlgorithm - ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi) - ctx.verify(rdata.mac) - return _maybe_start_digest(key, rdata.mac, multi) - - -def get_context(key): - """Returns an HMAC context for the specified key. 
- - @rtype: HMAC context - @raises NotImplementedError: I{algorithm} is not supported - """ - - if key.algorithm == GSS_TSIG: - return GSSTSig(key.secret) - else: - return HMACTSig(key.secret, key.algorithm) - - -class Key: - def __init__(self, name, secret, algorithm=default_algorithm): - if isinstance(name, str): - name = dns.name.from_text(name) - self.name = name - if isinstance(secret, str): - secret = base64.decodebytes(secret.encode()) - self.secret = secret - if isinstance(algorithm, str): - algorithm = dns.name.from_text(algorithm) - self.algorithm = algorithm - - def __eq__(self, other): - return ( - isinstance(other, Key) - and self.name == other.name - and self.secret == other.secret - and self.algorithm == other.algorithm - ) - - def __repr__(self): - r = f"<DNS key name='{self.name}', " + f"algorithm='{self.algorithm}'" - if self.algorithm != GSS_TSIG: - r += f", secret='{base64.b64encode(self.secret).decode()}'" - r += ">" - return r diff --git a/venv/lib/python3.12/site-packages/dns/tsigkeyring.py b/venv/lib/python3.12/site-packages/dns/tsigkeyring.py deleted file mode 100644 --- a/venv/lib/python3.12/site-packages/dns/tsigkeyring.py +++ /dev/null -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""A place to store TSIG keys.""" - -import base64 -from typing import Any, Dict - -import dns.name -import dns.tsig - - -def from_text(textring: Dict[str, Any]) -> Dict[dns.name.Name, dns.tsig.Key]: - """Convert a dictionary containing (textual DNS name, base64 secret) - pairs into a binary keyring which has (dns.name.Name, bytes) pairs, or - a dictionary containing (textual DNS name, (algorithm, base64 secret)) - pairs into a binary keyring which has (dns.name.Name, dns.tsig.Key) pairs. - @rtype: dict""" - - keyring = {} - for name, value in textring.items(): - kname = dns.name.from_text(name) - if isinstance(value, str): - keyring[kname] = dns.tsig.Key(kname, value).secret - else: - (algorithm, secret) = value - keyring[kname] = dns.tsig.Key(kname, secret, algorithm) - return keyring - - -def to_text(keyring: Dict[dns.name.Name, Any]) -> Dict[str, Any]: - """Convert a dictionary containing (dns.name.Name, dns.tsig.Key) pairs - into a text keyring which has (textual DNS name, (textual algorithm, - base64 secret)) pairs, or a dictionary containing (dns.name.Name, bytes) - pairs into a text keyring which has (textual DNS name, base64 secret) pairs. - @rtype: dict""" - - textring = {} - - def b64encode(secret): - return base64.encodebytes(secret).decode().rstrip() - - for name, key in keyring.items(): - tname = name.to_text() - if isinstance(key, bytes): - textring[tname] = b64encode(key) - else: - if isinstance(key.secret, bytes): - text_secret = b64encode(key.secret) - else: - text_secret = str(key.secret) - - textring[tname] = (key.algorithm.to_text(), text_secret) - return textring diff --git a/venv/lib/python3.12/site-packages/dns/ttl.py b/venv/lib/python3.12/site-packages/dns/ttl.py deleted file mode 100644 index b9a99fe3..00000000 --- a/venv/lib/python3.12/site-packages/dns/ttl.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
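Stepping back from the two modules above: in practice TSIG is normally reached through the message API rather than by calling sign()/validate() directly; a sketch with a made-up key name and secret:

```python
import dns.message
import dns.tsigkeyring

keyring = dns.tsigkeyring.from_text({"example-key.": "c2VjcmV0c2VjcmV0"})
query = dns.message.make_query("www.example.", "A")
query.use_tsig(keyring, keyname="example-key.")
wire = query.to_wire()  # the TSIG record is computed and appended here
```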
- -"""DNS TTL conversion.""" - -from typing import Union - -import dns.exception - -# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values -# greater than that interpreted as 0, but we do not impose this policy here -# as values > 2**31 - 1 occur in real world data. -# -# We leave it to applications to impose tighter bounds if desired. -MAX_TTL = 2**32 - 1 - - -class BadTTL(dns.exception.SyntaxError): - """DNS TTL value is not well-formed.""" - - -def from_text(text: str) -> int: - """Convert the text form of a TTL to an integer. - - The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. - - *text*, a ``str``, the textual TTL. - - Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. - - Returns an ``int``. - """ - - if text.isdigit(): - total = int(text) - elif len(text) == 0: - raise BadTTL - else: - total = 0 - current = 0 - need_digit = True - for c in text: - if c.isdigit(): - current *= 10 - current += int(c) - need_digit = False - else: - if need_digit: - raise BadTTL - c = c.lower() - if c == "w": - total += current * 604800 - elif c == "d": - total += current * 86400 - elif c == "h": - total += current * 3600 - elif c == "m": - total += current * 60 - elif c == "s": - total += current - else: - raise BadTTL(f"unknown unit '{c}'") - current = 0 - need_digit = True - if not current == 0: - raise BadTTL("trailing integer") - if total < 0 or total > MAX_TTL: - raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") - return total - - -def make(value: Union[int, str]) -> int: - if isinstance(value, int): - return value - elif isinstance(value, str): - return dns.ttl.from_text(value) - else: - raise ValueError("cannot convert value to TTL") diff --git a/venv/lib/python3.12/site-packages/dns/update.py b/venv/lib/python3.12/site-packages/dns/update.py deleted file mode 100644 index bf1157ac..00000000 --- a/venv/lib/python3.12/site-packages/dns/update.py +++ /dev/null @@ -1,386 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Dynamic Update Support""" - -from typing import Any, List, Optional, Union - -import dns.message -import dns.name -import dns.opcode -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.tsig - - -class UpdateSection(dns.enum.IntEnum): - """Update sections""" - - ZONE = 0 - PREREQ = 1 - UPDATE = 2 - ADDITIONAL = 3 - - @classmethod - def _maximum(cls): - return 3 - - -class UpdateMessage(dns.message.Message): # lgtm[py/missing-equals] - # ignore the mypy error here as we mean to use a different enum - _section_enum = UpdateSection # type: ignore - - def __init__( - self, - zone: Optional[Union[dns.name.Name, str]] = None, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - keyring: Optional[Any] = None, - keyname: Optional[dns.name.Name] = None, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - id: Optional[int] = None, - ): - """Initialize a new DNS Update object. - - See the documentation of the Message class for a complete - description of the keyring dictionary. - - *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone - which is being updated. ``None`` should only be used by dnspython's - message constructors, as a zone is required for the convenience - methods like ``add()``, ``replace()``, etc. - - *rdclass*, an ``int`` or ``str``, the class of the zone. - - The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to - ``use_tsig()``; see its documentation for details. - """ - super().__init__(id=id) - self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) - if isinstance(zone, str): - zone = dns.name.from_text(zone) - self.origin = zone - rdclass = dns.rdataclass.RdataClass.make(rdclass) - self.zone_rdclass = rdclass - if self.origin: - self.find_rrset( - self.zone, - self.origin, - rdclass, - dns.rdatatype.SOA, - create=True, - force_unique=True, - ) - if keyring is not None: - self.use_tsig(keyring, keyname, algorithm=keyalgorithm) - - @property - def zone(self) -> List[dns.rrset.RRset]: - """The zone section.""" - return self.sections[0] - - @zone.setter - def zone(self, v): - self.sections[0] = v - - @property - def prerequisite(self) -> List[dns.rrset.RRset]: - """The prerequisite section.""" - return self.sections[1] - - @prerequisite.setter - def prerequisite(self, v): - self.sections[1] = v - - @property - def update(self) -> List[dns.rrset.RRset]: - """The update section.""" - return self.sections[2] - - @update.setter - def update(self, v): - self.sections[2] = v - - def _add_rr(self, name, ttl, rd, deleting=None, section=None): - """Add a single RR to the update section.""" - - if section is None: - section = self.update - covers = rd.covers() - rrset = self.find_rrset( - section, name, self.zone_rdclass, rd.rdtype, covers, deleting, True, True - ) - rrset.add(rd, ttl) - - def _add(self, replace, section, name, *args): - """Add records. - - *replace* is the replacement mode. If ``False``, - RRs are added to an existing RRset; if ``True``, the RRset - is replaced with the specified contents. The second - argument is the section to add to. The third argument - is always a name. The other arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... 
- """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - if replace: - self.delete(name, rds.rdtype) - for rd in rds: - self._add_rr(name, rds.ttl, rd, section=section) - else: - args = list(args) - ttl = int(args.pop(0)) - if isinstance(args[0], dns.rdata.Rdata): - if replace: - self.delete(name, args[0].rdtype) - for rd in args: - self._add_rr(name, ttl, rd, section=section) - else: - rdtype = dns.rdatatype.RdataType.make(args.pop(0)) - if replace: - self.delete(name, rdtype) - for s in args: - rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, self.origin) - self._add_rr(name, ttl, rd, section=section) - - def add(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Add records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - """ - - self._add(False, self.update, name, *args) - - def delete(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Delete records. - - The first argument is always a name. The other - arguments can be: - - - *empty* - - - rdataset... - - - rdata... - - - rdtype, [string...] - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset( - self.update, - name, - dns.rdataclass.ANY, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - dns.rdataclass.ANY, - True, - True, - ) - elif isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - for rd in rds: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - largs = list(args) - if isinstance(largs[0], dns.rdata.Rdata): - for rd in largs: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - rdtype = dns.rdatatype.RdataType.make(largs.pop(0)) - if len(largs) == 0: - self.find_rrset( - self.update, - name, - self.zone_rdclass, - rdtype, - dns.rdatatype.NONE, - dns.rdataclass.ANY, - True, - True, - ) - else: - for s in largs: - rd = dns.rdata.from_text( - self.zone_rdclass, - rdtype, - s, # type: ignore[arg-type] - self.origin, - ) - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - - def replace(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Replace records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - - Note that if you want to replace the entire node, you should do - a delete of the name followed by one or more calls to add. - """ - - self._add(True, self.update, name, *args) - - def present(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Require that an owner name (and optionally an rdata type, - or specific rdataset) exists as a prerequisite to the - execution of the update. - - The first argument is always a name. - The other arguments can be: - - - rdataset... - - - rdata... - - - rdtype, string... 
- """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.ANY, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - None, - True, - True, - ) - elif ( - isinstance(args[0], dns.rdataset.Rdataset) - or isinstance(args[0], dns.rdata.Rdata) - or len(args) > 1 - ): - if not isinstance(args[0], dns.rdataset.Rdataset): - # Add a 0 TTL - largs = list(args) - largs.insert(0, 0) # type: ignore[arg-type] - self._add(False, self.prerequisite, name, *largs) - else: - self._add(False, self.prerequisite, name, *args) - else: - rdtype = dns.rdatatype.RdataType.make(args[0]) - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.ANY, - rdtype, - dns.rdatatype.NONE, - None, - True, - True, - ) - - def absent( - self, - name: Union[dns.name.Name, str], - rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, - ) -> None: - """Require that an owner name (and optionally an rdata type) does - not exist as a prerequisite to the execution of the update.""" - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if rdtype is None: - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.NONE, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - None, - True, - True, - ) - else: - rdtype = dns.rdatatype.RdataType.make(rdtype) - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.NONE, - rdtype, - dns.rdatatype.NONE, - None, - True, - True, - ) - - def _get_one_rr_per_rrset(self, value): - # Updates are always one_rr_per_rrset - return True - - def _parse_rr_header(self, section, name, rdclass, rdtype): - deleting = None - empty = False - if section == UpdateSection.ZONE: - if ( - dns.rdataclass.is_metaclass(rdclass) - or rdtype != dns.rdatatype.SOA - or self.zone - ): - raise dns.exception.FormError - else: - if not self.zone: - raise dns.exception.FormError - if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): - deleting = rdclass - rdclass = self.zone[0].rdclass - empty = ( - deleting == dns.rdataclass.ANY or section == UpdateSection.PREREQ - ) - return (rdclass, rdtype, deleting, empty) - - -# backwards compatibility -Update = UpdateMessage - -### BEGIN generated UpdateSection constants - -ZONE = UpdateSection.ZONE -PREREQ = UpdateSection.PREREQ -UPDATE = UpdateSection.UPDATE -ADDITIONAL = UpdateSection.ADDITIONAL - -### END generated UpdateSection constants diff --git a/venv/lib/python3.12/site-packages/dns/version.py b/venv/lib/python3.12/site-packages/dns/version.py deleted file mode 100644 index 9ed2ce19..00000000 --- a/venv/lib/python3.12/site-packages/dns/version.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
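Reviewer note: to make the `UpdateMessage` semantics above concrete, here is a hedged sketch of composing a dynamic update. The zone, key name, secret, and server address are placeholders, not values from this repository:

```python
import dns.query
import dns.tsigkeyring
import dns.update

# Placeholder TSIG key; with a one-key keyring, use_tsig() picks it by default.
keyring = dns.tsigkeyring.from_text({"update-key.": "c2VjcmV0c2VjcmV0c2VjcmV0"})

update = dns.update.UpdateMessage("example.", keyring=keyring)
update.present("www")                            # prerequisite: name must exist
update.replace("www", 300, "A", "203.0.113.10")  # swap the whole A rrset
update.delete("old", "TXT")                      # drop one rrset by type
update.add("new", 300, "A", "203.0.113.11")      # append to (or create) an rrset

# The message would then be sent to a primary server, e.g.:
# response = dns.query.tcp(update, "203.0.113.1")
```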
- -"""dnspython release version information.""" - -#: MAJOR -MAJOR = 2 -#: MINOR -MINOR = 7 -#: MICRO -MICRO = 0 -#: RELEASELEVEL -RELEASELEVEL = 0x0F -#: SERIAL -SERIAL = 0 - -if RELEASELEVEL == 0x0F: # pragma: no cover lgtm[py/unreachable-statement] - #: version - version = "%d.%d.%d" % (MAJOR, MINOR, MICRO) # lgtm[py/unreachable-statement] -elif RELEASELEVEL == 0x00: # pragma: no cover lgtm[py/unreachable-statement] - version = "%d.%d.%ddev%d" % ( - MAJOR, - MINOR, - MICRO, - SERIAL, - ) # lgtm[py/unreachable-statement] -elif RELEASELEVEL == 0x0C: # pragma: no cover lgtm[py/unreachable-statement] - version = "%d.%d.%drc%d" % ( - MAJOR, - MINOR, - MICRO, - SERIAL, - ) # lgtm[py/unreachable-statement] -else: # pragma: no cover lgtm[py/unreachable-statement] - version = "%d.%d.%d%x%d" % ( - MAJOR, - MINOR, - MICRO, - RELEASELEVEL, - SERIAL, - ) # lgtm[py/unreachable-statement] - -#: hexversion -hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL diff --git a/venv/lib/python3.12/site-packages/dns/versioned.py b/venv/lib/python3.12/site-packages/dns/versioned.py deleted file mode 100644 index fd78e674..00000000 --- a/venv/lib/python3.12/site-packages/dns/versioned.py +++ /dev/null @@ -1,318 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""DNS Versioned Zones.""" - -import collections -import threading -from typing import Callable, Deque, Optional, Set, Union - -import dns.exception -import dns.immutable -import dns.name -import dns.node -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rdtypes.ANY.SOA -import dns.zone - - -class UseTransaction(dns.exception.DNSException): - """To alter a versioned zone, use a transaction.""" - - -# Backwards compatibility -Node = dns.zone.VersionedNode -ImmutableNode = dns.zone.ImmutableVersionedNode -Version = dns.zone.Version -WritableVersion = dns.zone.WritableVersion -ImmutableVersion = dns.zone.ImmutableVersion -Transaction = dns.zone.Transaction - - -class Zone(dns.zone.Zone): # lgtm[py/missing-equals] - __slots__ = [ - "_versions", - "_versions_lock", - "_write_txn", - "_write_waiters", - "_write_event", - "_pruning_policy", - "_readers", - ] - - node_factory = Node - - def __init__( - self, - origin: Optional[Union[dns.name.Name, str]], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - relativize: bool = True, - pruning_policy: Optional[Callable[["Zone", Version], Optional[bool]]] = None, - ): - """Initialize a versioned zone object. - - *origin* is the origin of the zone. It may be a ``dns.name.Name``, - a ``str``, or ``None``. If ``None``, then the zone's origin will - be set by the first ``$ORIGIN`` line in a zone file. - - *rdclass*, an ``int``, the zone's rdata class; the default is class IN. - - *relativize*, a ``bool``, determine's whether domain names are - relativized to the zone's origin. The default is ``True``. - - *pruning policy*, a function taking a ``Zone`` and a ``Version`` and returning - a ``bool``, or ``None``. Should the version be pruned? If ``None``, - the default policy, which retains one version is used. 
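Reviewer note on `version.py` above: the `hexversion` packing is easy to verify by hand. A worked check of the encoding, using the values from the deleted file:

```python
# MAJOR=2, MINOR=7, MICRO=0, RELEASELEVEL=0x0F (final), SERIAL=0
MAJOR, MINOR, MICRO, RELEASELEVEL, SERIAL = 2, 7, 0, 0x0F, 0

hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL
assert hexversion == 0x020700F0  # i.e. version "2.7.0", final release
```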
- """ - super().__init__(origin, rdclass, relativize) - self._versions: Deque[Version] = collections.deque() - self._version_lock = threading.Lock() - if pruning_policy is None: - self._pruning_policy = self._default_pruning_policy - else: - self._pruning_policy = pruning_policy - self._write_txn: Optional[Transaction] = None - self._write_event: Optional[threading.Event] = None - self._write_waiters: Deque[threading.Event] = collections.deque() - self._readers: Set[Transaction] = set() - self._commit_version_unlocked( - None, WritableVersion(self, replacement=True), origin - ) - - def reader( - self, id: Optional[int] = None, serial: Optional[int] = None - ) -> Transaction: # pylint: disable=arguments-differ - if id is not None and serial is not None: - raise ValueError("cannot specify both id and serial") - with self._version_lock: - if id is not None: - version = None - for v in reversed(self._versions): - if v.id == id: - version = v - break - if version is None: - raise KeyError("version not found") - elif serial is not None: - if self.relativize: - oname = dns.name.empty - else: - assert self.origin is not None - oname = self.origin - version = None - for v in reversed(self._versions): - n = v.nodes.get(oname) - if n: - rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA) - if rds and rds[0].serial == serial: - version = v - break - if version is None: - raise KeyError("serial not found") - else: - version = self._versions[-1] - txn = Transaction(self, False, version) - self._readers.add(txn) - return txn - - def writer(self, replacement: bool = False) -> Transaction: - event = None - while True: - with self._version_lock: - # Checking event == self._write_event ensures that either - # no one was waiting before we got lucky and found no write - # txn, or we were the one who was waiting and got woken up. - # This prevents "taking cuts" when creating a write txn. - if self._write_txn is None and event == self._write_event: - # Creating the transaction defers version setup - # (i.e. copying the nodes dictionary) until we - # give up the lock, so that we hold the lock as - # short a time as possible. This is why we call - # _setup_version() below. - self._write_txn = Transaction( - self, replacement, make_immutable=True - ) - # give up our exclusive right to make a Transaction - self._write_event = None - break - # Someone else is writing already, so we will have to - # wait, but we want to do the actual wait outside the - # lock. - event = threading.Event() - self._write_waiters.append(event) - # wait (note we gave up the lock!) - # - # We only wake one sleeper at a time, so it's important - # that no event waiter can exit this method (e.g. via - # cancellation) without returning a transaction or waking - # someone else up. - # - # This is not a problem with Threading module threads as - # they cannot be canceled, but could be an issue with trio - # tasks when we do the async version of writer(). - # I.e. we'd need to do something like: - # - # try: - # event.wait() - # except trio.Cancelled: - # with self._version_lock: - # self._maybe_wakeup_one_waiter_unlocked() - # raise - # - event.wait() - # Do the deferred version setup. 
- self._write_txn._setup_version() - return self._write_txn - - def _maybe_wakeup_one_waiter_unlocked(self): - if len(self._write_waiters) > 0: - self._write_event = self._write_waiters.popleft() - self._write_event.set() - - # pylint: disable=unused-argument - def _default_pruning_policy(self, zone, version): - return True - - # pylint: enable=unused-argument - - def _prune_versions_unlocked(self): - assert len(self._versions) > 0 - # Don't ever prune a version greater than or equal to one that - # a reader has open. This pins versions in memory while the - # reader is open, and importantly lets the reader open a txn on - # a successor version (e.g. if generating an IXFR). - # - # Note our definition of least_kept also ensures we do not try to - # delete the greatest version. - if len(self._readers) > 0: - least_kept = min(txn.version.id for txn in self._readers) - else: - least_kept = self._versions[-1].id - while self._versions[0].id < least_kept and self._pruning_policy( - self, self._versions[0] - ): - self._versions.popleft() - - def set_max_versions(self, max_versions: Optional[int]) -> None: - """Set a pruning policy that retains up to the specified number - of versions - """ - if max_versions is not None and max_versions < 1: - raise ValueError("max versions must be at least 1") - if max_versions is None: - - def policy(zone, _): # pylint: disable=unused-argument - return False - - else: - - def policy(zone, _): - return len(zone._versions) > max_versions - - self.set_pruning_policy(policy) - - def set_pruning_policy( - self, policy: Optional[Callable[["Zone", Version], Optional[bool]]] - ) -> None: - """Set the pruning policy for the zone. - - The *policy* function takes a `Version` and returns `True` if - the version should be pruned, and `False` otherwise. `None` - may also be specified for policy, in which case the default policy - is used. - - Pruning checking proceeds from the least version and the first - time the function returns `False`, the checking stops. I.e. the - retained versions are always a consecutive sequence. - """ - if policy is None: - policy = self._default_pruning_policy - with self._version_lock: - self._pruning_policy = policy - self._prune_versions_unlocked() - - def _end_read(self, txn): - with self._version_lock: - self._readers.remove(txn) - self._prune_versions_unlocked() - - def _end_write_unlocked(self, txn): - assert self._write_txn == txn - self._write_txn = None - self._maybe_wakeup_one_waiter_unlocked() - - def _end_write(self, txn): - with self._version_lock: - self._end_write_unlocked(txn) - - def _commit_version_unlocked(self, txn, version, origin): - self._versions.append(version) - self._prune_versions_unlocked() - self.nodes = version.nodes - if self.origin is None: - self.origin = origin - # txn can be None in __init__ when we make the empty version. 
- if txn is not None: - self._end_write_unlocked(txn) - - def _commit_version(self, txn, version, origin): - with self._version_lock: - self._commit_version_unlocked(txn, version, origin) - - def _get_next_version_id(self): - if len(self._versions) > 0: - id = self._versions[-1].id + 1 - else: - id = 1 - return id - - def find_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> dns.node.Node: - if create: - raise UseTransaction - return super().find_node(name) - - def delete_node(self, name: Union[dns.name.Name, str]) -> None: - raise UseTransaction - - def find_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise UseTransaction - rdataset = super().find_rdataset(name, rdtype, covers) - return dns.rdataset.ImmutableRdataset(rdataset) - - def get_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise UseTransaction - rdataset = super().get_rdataset(name, rdtype, covers) - if rdataset is not None: - return dns.rdataset.ImmutableRdataset(rdataset) - else: - return None - - def delete_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> None: - raise UseTransaction - - def replace_rdataset( - self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset - ) -> None: - raise UseTransaction diff --git a/venv/lib/python3.12/site-packages/dns/win32util.py b/venv/lib/python3.12/site-packages/dns/win32util.py deleted file mode 100644 index 9ed3f11b..00000000 --- a/venv/lib/python3.12/site-packages/dns/win32util.py +++ /dev/null @@ -1,242 +0,0 @@ -import sys - -import dns._features - -if sys.platform == "win32": - from typing import Any - - import dns.name - - _prefer_wmi = True - - import winreg # pylint: disable=import-error - - # Keep pylint quiet on non-windows. - try: - _ = WindowsError # pylint: disable=used-before-assignment - except NameError: - WindowsError = Exception - - if dns._features.have("wmi"): - import threading - - import pythoncom # pylint: disable=import-error - import wmi # pylint: disable=import-error - - _have_wmi = True - else: - _have_wmi = False - - def _config_domain(domain): - # Sometimes DHCP servers add a '.' prefix to the default domain, and - # Windows just stores such values in the registry (see #687). - # Check for this and fix it. 
- if domain.startswith("."): - domain = domain[1:] - return dns.name.from_text(domain) - - class DnsInfo: - def __init__(self): - self.domain = None - self.nameservers = [] - self.search = [] - - if _have_wmi: - - class _WMIGetter(threading.Thread): - # pylint: disable=possibly-used-before-assignment - def __init__(self): - super().__init__() - self.info = DnsInfo() - - def run(self): - pythoncom.CoInitialize() - try: - system = wmi.WMI() - for interface in system.Win32_NetworkAdapterConfiguration(): - if interface.IPEnabled and interface.DNSServerSearchOrder: - self.info.nameservers = list(interface.DNSServerSearchOrder) - if interface.DNSDomain: - self.info.domain = _config_domain(interface.DNSDomain) - if interface.DNSDomainSuffixSearchOrder: - self.info.search = [ - _config_domain(x) - for x in interface.DNSDomainSuffixSearchOrder - ] - break - finally: - pythoncom.CoUninitialize() - - def get(self): - # We always run in a separate thread to avoid any issues with - # the COM threading model. - self.start() - self.join() - return self.info - - else: - - class _WMIGetter: # type: ignore - pass - - class _RegistryGetter: - def __init__(self): - self.info = DnsInfo() - - def _split(self, text): - # The windows registry has used both " " and "," as a delimiter, and while - # it is currently using "," in Windows 10 and later, updates can seemingly - # leave a space in too, e.g. "a, b". So we just convert all commas to - # spaces, and use split() in its default configuration, which splits on - # all whitespace and ignores empty strings. - return text.replace(",", " ").split() - - def _config_nameservers(self, nameservers): - for ns in self._split(nameservers): - if ns not in self.info.nameservers: - self.info.nameservers.append(ns) - - def _config_search(self, search): - for s in self._split(search): - s = _config_domain(s) - if s not in self.info.search: - self.info.search.append(s) - - def _config_fromkey(self, key, always_try_domain): - try: - servers, _ = winreg.QueryValueEx(key, "NameServer") - except WindowsError: - servers = None - if servers: - self._config_nameservers(servers) - if servers or always_try_domain: - try: - dom, _ = winreg.QueryValueEx(key, "Domain") - if dom: - self.info.domain = _config_domain(dom) - except WindowsError: - pass - else: - try: - servers, _ = winreg.QueryValueEx(key, "DhcpNameServer") - except WindowsError: - servers = None - if servers: - self._config_nameservers(servers) - try: - dom, _ = winreg.QueryValueEx(key, "DhcpDomain") - if dom: - self.info.domain = _config_domain(dom) - except WindowsError: - pass - try: - search, _ = winreg.QueryValueEx(key, "SearchList") - except WindowsError: - search = None - if search is None: - try: - search, _ = winreg.QueryValueEx(key, "DhcpSearchList") - except WindowsError: - search = None - if search: - self._config_search(search) - - def _is_nic_enabled(self, lm, guid): - # Look in the Windows Registry to determine whether the network - # interface corresponding to the given guid is enabled. - # - # (Code contributed by Paul Marks, thanks!) - # - try: - # This hard-coded location seems to be consistent, at least - # from Windows 2000 through Vista. 
- connection_key = winreg.OpenKey( - lm, - r"SYSTEM\CurrentControlSet\Control\Network" - r"\{4D36E972-E325-11CE-BFC1-08002BE10318}" - rf"\{guid}\Connection", - ) - - try: - # The PnpInstanceID points to a key inside Enum - (pnp_id, ttype) = winreg.QueryValueEx( - connection_key, "PnpInstanceID" - ) - - if ttype != winreg.REG_SZ: - raise ValueError # pragma: no cover - - device_key = winreg.OpenKey( - lm, rf"SYSTEM\CurrentControlSet\Enum\{pnp_id}" - ) - - try: - # Get ConfigFlags for this device - (flags, ttype) = winreg.QueryValueEx(device_key, "ConfigFlags") - - if ttype != winreg.REG_DWORD: - raise ValueError # pragma: no cover - - # Based on experimentation, bit 0x1 indicates that the - # device is disabled. - # - # XXXRTH I suspect we really want to & with 0x03 so - # that CONFIGFLAGS_REMOVED devices are also ignored, - # but we're shifting to WMI as ConfigFlags is not - # supposed to be used. - return not flags & 0x1 - - finally: - device_key.Close() - finally: - connection_key.Close() - except Exception: # pragma: no cover - return False - - def get(self): - """Extract resolver configuration from the Windows registry.""" - - lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) - try: - tcp_params = winreg.OpenKey( - lm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" - ) - try: - self._config_fromkey(tcp_params, True) - finally: - tcp_params.Close() - interfaces = winreg.OpenKey( - lm, - r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces", - ) - try: - i = 0 - while True: - try: - guid = winreg.EnumKey(interfaces, i) - i += 1 - key = winreg.OpenKey(interfaces, guid) - try: - if not self._is_nic_enabled(lm, guid): - continue - self._config_fromkey(key, False) - finally: - key.Close() - except OSError: - break - finally: - interfaces.Close() - finally: - lm.Close() - return self.info - - _getter_class: Any - if _have_wmi and _prefer_wmi: - _getter_class = _WMIGetter - else: - _getter_class = _RegistryGetter - - def get_dns_info(): - """Extract resolver configuration.""" - getter = _getter_class() - return getter.get() diff --git a/venv/lib/python3.12/site-packages/dns/wire.py b/venv/lib/python3.12/site-packages/dns/wire.py deleted file mode 100644 index 9f9b1573..00000000 --- a/venv/lib/python3.12/site-packages/dns/wire.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import contextlib -import struct -from typing import Iterator, Optional, Tuple - -import dns.exception -import dns.name - - -class Parser: - def __init__(self, wire: bytes, current: int = 0): - self.wire = wire - self.current = 0 - self.end = len(self.wire) - if current: - self.seek(current) - self.furthest = current - - def remaining(self) -> int: - return self.end - self.current - - def get_bytes(self, size: int) -> bytes: - assert size >= 0 - if size > self.remaining(): - raise dns.exception.FormError - output = self.wire[self.current : self.current + size] - self.current += size - self.furthest = max(self.furthest, self.current) - return output - - def get_counted_bytes(self, length_size: int = 1) -> bytes: - length = int.from_bytes(self.get_bytes(length_size), "big") - return self.get_bytes(length) - - def get_remaining(self) -> bytes: - return self.get_bytes(self.remaining()) - - def get_uint8(self) -> int: - return struct.unpack("!B", self.get_bytes(1))[0] - - def get_uint16(self) -> int: - return struct.unpack("!H", self.get_bytes(2))[0] - - def get_uint32(self) -> int: - return struct.unpack("!I", self.get_bytes(4))[0] - - 
def get_uint48(self) -> int: - return int.from_bytes(self.get_bytes(6), "big") - - def get_struct(self, format: str) -> Tuple: - return struct.unpack(format, self.get_bytes(struct.calcsize(format))) - - def get_name(self, origin: Optional["dns.name.Name"] = None) -> "dns.name.Name": - name = dns.name.from_wire_parser(self) - if origin: - name = name.relativize(origin) - return name - - def seek(self, where: int) -> None: - # Note that seeking to the end is OK! (If you try to read - # after such a seek, you'll get an exception as expected.) - if where < 0 or where > self.end: - raise dns.exception.FormError - self.current = where - - @contextlib.contextmanager - def restrict_to(self, size: int) -> Iterator: - assert size >= 0 - if size > self.remaining(): - raise dns.exception.FormError - saved_end = self.end - try: - self.end = self.current + size - yield - # We make this check here and not in the finally as we - # don't want to raise if we're already raising for some - # other reason. - if self.current != self.end: - raise dns.exception.FormError - finally: - self.end = saved_end - - @contextlib.contextmanager - def restore_furthest(self) -> Iterator: - try: - yield None - finally: - self.current = self.furthest diff --git a/venv/lib/python3.12/site-packages/dns/xfr.py b/venv/lib/python3.12/site-packages/dns/xfr.py deleted file mode 100644 index 520aa32d..00000000 --- a/venv/lib/python3.12/site-packages/dns/xfr.py +++ /dev/null @@ -1,343 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from typing import Any, List, Optional, Tuple, Union - -import dns.exception -import dns.message -import dns.name -import dns.rcode -import dns.rdataset -import dns.rdatatype -import dns.serial -import dns.transaction -import dns.tsig -import dns.zone - - -class TransferError(dns.exception.DNSException): - """A zone transfer response got a non-zero rcode.""" - - def __init__(self, rcode): - message = f"Zone transfer error: {dns.rcode.to_text(rcode)}" - super().__init__(message) - self.rcode = rcode - - -class SerialWentBackwards(dns.exception.FormError): - """The current serial number is less than the serial we know.""" - - -class UseTCP(dns.exception.DNSException): - """This IXFR cannot be completed with UDP.""" - - -class Inbound: - """ - State machine for zone transfers. - """ - - def __init__( - self, - txn_manager: dns.transaction.TransactionManager, - rdtype: dns.rdatatype.RdataType = dns.rdatatype.AXFR, - serial: Optional[int] = None, - is_udp: bool = False, - ): - """Initialize an inbound zone transfer. - - *txn_manager* is a :py:class:`dns.transaction.TransactionManager`. 
- - *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR` - - *serial* is the base serial number for IXFRs, and is required in - that case. - - *is_udp*, a ``bool`` indidicates if UDP is being used for this - XFR. - """ - self.txn_manager = txn_manager - self.txn: Optional[dns.transaction.Transaction] = None - self.rdtype = rdtype - if rdtype == dns.rdatatype.IXFR: - if serial is None: - raise ValueError("a starting serial must be supplied for IXFRs") - elif is_udp: - raise ValueError("is_udp specified for AXFR") - self.serial = serial - self.is_udp = is_udp - (_, _, self.origin) = txn_manager.origin_information() - self.soa_rdataset: Optional[dns.rdataset.Rdataset] = None - self.done = False - self.expecting_SOA = False - self.delete_mode = False - - def process_message(self, message: dns.message.Message) -> bool: - """Process one message in the transfer. - - The message should have the same relativization as was specified when - the `dns.xfr.Inbound` was created. The message should also have been - created with `one_rr_per_rrset=True` because order matters. - - Returns `True` if the transfer is complete, and `False` otherwise. - """ - if self.txn is None: - replacement = self.rdtype == dns.rdatatype.AXFR - self.txn = self.txn_manager.writer(replacement) - rcode = message.rcode() - if rcode != dns.rcode.NOERROR: - raise TransferError(rcode) - # - # We don't require a question section, but if it is present is - # should be correct. - # - if len(message.question) > 0: - if message.question[0].name != self.origin: - raise dns.exception.FormError("wrong question name") - if message.question[0].rdtype != self.rdtype: - raise dns.exception.FormError("wrong question rdatatype") - answer_index = 0 - if self.soa_rdataset is None: - # - # This is the first message. We're expecting an SOA at - # the origin. - # - if not message.answer or message.answer[0].name != self.origin: - raise dns.exception.FormError("No answer or RRset not for zone origin") - rrset = message.answer[0] - rdataset = rrset - if rdataset.rdtype != dns.rdatatype.SOA: - raise dns.exception.FormError("first RRset is not an SOA") - answer_index = 1 - self.soa_rdataset = rdataset.copy() - if self.rdtype == dns.rdatatype.IXFR: - if self.soa_rdataset[0].serial == self.serial: - # - # We're already up-to-date. - # - self.done = True - elif dns.serial.Serial(self.soa_rdataset[0].serial) < self.serial: - # It went backwards! - raise SerialWentBackwards - else: - if self.is_udp and len(message.answer[answer_index:]) == 0: - # - # There are no more records, so this is the - # "truncated" response. Say to use TCP - # - raise UseTCP - # - # Note we're expecting another SOA so we can detect - # if this IXFR response is an AXFR-style response. - # - self.expecting_SOA = True - # - # Process the answer section (other than the initial SOA in - # the first message). - # - for rrset in message.answer[answer_index:]: - name = rrset.name - rdataset = rrset - if self.done: - raise dns.exception.FormError("answers after final SOA") - assert self.txn is not None # for mypy - if rdataset.rdtype == dns.rdatatype.SOA and name == self.origin: - # - # Every time we see an origin SOA delete_mode inverts - # - if self.rdtype == dns.rdatatype.IXFR: - self.delete_mode = not self.delete_mode - # - # If this SOA Rdataset is equal to the first we saw - # then we're finished. If this is an IXFR we also - # check that we're seeing the record in the expected - # part of the response. 
- # - if rdataset == self.soa_rdataset and ( - self.rdtype == dns.rdatatype.AXFR - or (self.rdtype == dns.rdatatype.IXFR and self.delete_mode) - ): - # - # This is the final SOA - # - if self.expecting_SOA: - # We got an empty IXFR sequence! - raise dns.exception.FormError("empty IXFR sequence") - if ( - self.rdtype == dns.rdatatype.IXFR - and self.serial != rdataset[0].serial - ): - raise dns.exception.FormError("unexpected end of IXFR sequence") - self.txn.replace(name, rdataset) - self.txn.commit() - self.txn = None - self.done = True - else: - # - # This is not the final SOA - # - self.expecting_SOA = False - if self.rdtype == dns.rdatatype.IXFR: - if self.delete_mode: - # This is the start of an IXFR deletion set - if rdataset[0].serial != self.serial: - raise dns.exception.FormError( - "IXFR base serial mismatch" - ) - else: - # This is the start of an IXFR addition set - self.serial = rdataset[0].serial - self.txn.replace(name, rdataset) - else: - # We saw a non-final SOA for the origin in an AXFR. - raise dns.exception.FormError("unexpected origin SOA in AXFR") - continue - if self.expecting_SOA: - # - # We made an IXFR request and are expecting another - # SOA RR, but saw something else, so this must be an - # AXFR response. - # - self.rdtype = dns.rdatatype.AXFR - self.expecting_SOA = False - self.delete_mode = False - self.txn.rollback() - self.txn = self.txn_manager.writer(True) - # - # Note we are falling through into the code below - # so whatever rdataset this was gets written. - # - # Add or remove the data - if self.delete_mode: - self.txn.delete_exact(name, rdataset) - else: - self.txn.add(name, rdataset) - if self.is_udp and not self.done: - # - # This is a UDP IXFR and we didn't get to done, and we didn't - # get the proper "truncated" response - # - raise dns.exception.FormError("unexpected end of UDP IXFR") - return self.done - - # - # Inbounds are context managers. - # - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self.txn: - self.txn.rollback() - return False - - -def make_query( - txn_manager: dns.transaction.TransactionManager, - serial: Optional[int] = 0, - use_edns: Optional[Union[int, bool]] = None, - ednsflags: Optional[int] = None, - payload: Optional[int] = None, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - keyring: Any = None, - keyname: Optional[dns.name.Name] = None, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, -) -> Tuple[dns.message.QueryMessage, Optional[int]]: - """Make an AXFR or IXFR query. - - *txn_manager* is a ``dns.transaction.TransactionManager``, typically a - ``dns.zone.Zone``. - - *serial* is an ``int`` or ``None``. If 0, then IXFR will be - attempted using the most recent serial number from the - *txn_manager*; it is the caller's responsibility to ensure there - are no write transactions active that could invalidate the - retrieved serial. If a serial cannot be determined, AXFR will be - forced. Other integer values are the starting serial to use. - ``None`` forces an AXFR. - - Please see the documentation for :py:func:`dns.message.make_query` and - :py:func:`dns.message.Message.use_tsig` for details on the other parameters - to this function. - - Returns a `(query, serial)` tuple. 
- """ - (zone_origin, _, origin) = txn_manager.origin_information() - if zone_origin is None: - raise ValueError("no zone origin") - if serial is None: - rdtype = dns.rdatatype.AXFR - elif not isinstance(serial, int): - raise ValueError("serial is not an integer") - elif serial == 0: - with txn_manager.reader() as txn: - rdataset = txn.get(origin, "SOA") - if rdataset: - serial = rdataset[0].serial - rdtype = dns.rdatatype.IXFR - else: - serial = None - rdtype = dns.rdatatype.AXFR - elif serial > 0 and serial < 4294967296: - rdtype = dns.rdatatype.IXFR - else: - raise ValueError("serial out-of-range") - rdclass = txn_manager.get_class() - q = dns.message.make_query( - zone_origin, - rdtype, - rdclass, - use_edns, - False, - ednsflags, - payload, - request_payload, - options, - ) - if serial is not None: - rdata = dns.rdata.from_text(rdclass, "SOA", f". . {serial} 0 0 0 0") - rrset = q.find_rrset( - q.authority, zone_origin, rdclass, dns.rdatatype.SOA, create=True - ) - rrset.add(rdata, 0) - if keyring is not None: - q.use_tsig(keyring, keyname, algorithm=keyalgorithm) - return (q, serial) - - -def extract_serial_from_query(query: dns.message.Message) -> Optional[int]: - """Extract the SOA serial number from query if it is an IXFR and return - it, otherwise return None. - - *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. - - Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have - an appropriate SOA RRset in the authority section. - """ - if not isinstance(query, dns.message.QueryMessage): - raise ValueError("query not a QueryMessage") - question = query.question[0] - if question.rdtype == dns.rdatatype.AXFR: - return None - elif question.rdtype != dns.rdatatype.IXFR: - raise ValueError("query is not an AXFR or IXFR") - soa = query.find_rrset( - query.authority, question.name, question.rdclass, dns.rdatatype.SOA - ) - return soa[0].serial diff --git a/venv/lib/python3.12/site-packages/dns/zone.py b/venv/lib/python3.12/site-packages/dns/zone.py deleted file mode 100644 index 844919e4..00000000 --- a/venv/lib/python3.12/site-packages/dns/zone.py +++ /dev/null @@ -1,1434 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
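Reviewer note: a brief sketch of how the XFR pieces above are typically driven. Everything here is a placeholder except the API calls; the commented transfer needs a reachable primary:

```python
import dns.rdatatype
import dns.xfr
import dns.zone

zone = dns.zone.Zone("example.")            # empty zone: no SOA to read yet
(query, serial) = dns.xfr.make_query(zone)  # serial lookup fails, so AXFR
assert serial is None
assert query.question[0].rdtype == dns.rdatatype.AXFR

# With a reachable primary (placeholder address), one call would drive the
# whole transfer, feeding each response to Inbound.process_message():
# dns.query.inbound_xfr("203.0.113.1", zone)
```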
- -"""DNS Zones.""" - -import contextlib -import io -import os -import struct -from typing import ( - Any, - Callable, - Iterable, - Iterator, - List, - MutableMapping, - Optional, - Set, - Tuple, - Union, -) - -import dns.exception -import dns.grange -import dns.immutable -import dns.name -import dns.node -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rdtypes.ANY.SOA -import dns.rdtypes.ANY.ZONEMD -import dns.rrset -import dns.tokenizer -import dns.transaction -import dns.ttl -import dns.zonefile -from dns.zonetypes import DigestHashAlgorithm, DigestScheme, _digest_hashers - - -class BadZone(dns.exception.DNSException): - """The DNS zone is malformed.""" - - -class NoSOA(BadZone): - """The DNS zone has no SOA RR at its origin.""" - - -class NoNS(BadZone): - """The DNS zone has no NS RRset at its origin.""" - - -class UnknownOrigin(BadZone): - """The DNS zone's origin is unknown.""" - - -class UnsupportedDigestScheme(dns.exception.DNSException): - """The zone digest's scheme is unsupported.""" - - -class UnsupportedDigestHashAlgorithm(dns.exception.DNSException): - """The zone digest's origin is unsupported.""" - - -class NoDigest(dns.exception.DNSException): - """The DNS zone has no ZONEMD RRset at its origin.""" - - -class DigestVerificationFailure(dns.exception.DNSException): - """The ZONEMD digest failed to verify.""" - - -def _validate_name( - name: dns.name.Name, - origin: Optional[dns.name.Name], - relativize: bool, -) -> dns.name.Name: - # This name validation code is shared by Zone and Version - if origin is None: - # This should probably never happen as other code (e.g. - # _rr_line) will notice the lack of an origin before us, but - # we check just in case! - raise KeyError("no zone origin is defined") - if name.is_absolute(): - if not name.is_subdomain(origin): - raise KeyError("name parameter must be a subdomain of the zone origin") - if relativize: - name = name.relativize(origin) - else: - # We have a relative name. Make sure that the derelativized name is - # not too long. - try: - abs_name = name.derelativize(origin) - except dns.name.NameTooLong: - # We map dns.name.NameTooLong to KeyError to be consistent with - # the other exceptions above. - raise KeyError("relative name too long for zone") - if not relativize: - # We have a relative name in a non-relative zone, so use the - # derelativized name. - name = abs_name - return name - - -class Zone(dns.transaction.TransactionManager): - """A DNS zone. - - A ``Zone`` is a mapping from names to nodes. The zone object may be - treated like a Python dictionary, e.g. ``zone[name]`` will retrieve - the node associated with that name. The *name* may be a - ``dns.name.Name object``, or it may be a string. In either case, - if the name is relative it is treated as relative to the origin of - the zone. - """ - - node_factory: Callable[[], dns.node.Node] = dns.node.Node - map_factory: Callable[[], MutableMapping[dns.name.Name, dns.node.Node]] = dict - writable_version_factory: Optional[Callable[[], "WritableVersion"]] = None - immutable_version_factory: Optional[Callable[[], "ImmutableVersion"]] = None - - __slots__ = ["rdclass", "origin", "nodes", "relativize"] - - def __init__( - self, - origin: Optional[Union[dns.name.Name, str]], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - relativize: bool = True, - ): - """Initialize a zone object. - - *origin* is the origin of the zone. It may be a ``dns.name.Name``, - a ``str``, or ``None``. 
If ``None``, then the zone's origin will - be set by the first ``$ORIGIN`` line in a zone file. - - *rdclass*, an ``int``, the zone's rdata class; the default is class IN. - - *relativize*, a ``bool``, determine's whether domain names are - relativized to the zone's origin. The default is ``True``. - """ - - if origin is not None: - if isinstance(origin, str): - origin = dns.name.from_text(origin) - elif not isinstance(origin, dns.name.Name): - raise ValueError("origin parameter must be convertible to a DNS name") - if not origin.is_absolute(): - raise ValueError("origin parameter must be an absolute name") - self.origin = origin - self.rdclass = rdclass - self.nodes: MutableMapping[dns.name.Name, dns.node.Node] = self.map_factory() - self.relativize = relativize - - def __eq__(self, other): - """Two zones are equal if they have the same origin, class, and - nodes. - - Returns a ``bool``. - """ - - if not isinstance(other, Zone): - return False - if ( - self.rdclass != other.rdclass - or self.origin != other.origin - or self.nodes != other.nodes - ): - return False - return True - - def __ne__(self, other): - """Are two zones not equal? - - Returns a ``bool``. - """ - - return not self.__eq__(other) - - def _validate_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: - # Note that any changes in this method should have corresponding changes - # made in the Version _validate_name() method. - if isinstance(name, str): - name = dns.name.from_text(name, None) - elif not isinstance(name, dns.name.Name): - raise KeyError("name parameter must be convertible to a DNS name") - return _validate_name(name, self.origin, self.relativize) - - def __getitem__(self, key): - key = self._validate_name(key) - return self.nodes[key] - - def __setitem__(self, key, value): - key = self._validate_name(key) - self.nodes[key] = value - - def __delitem__(self, key): - key = self._validate_name(key) - del self.nodes[key] - - def __iter__(self): - return self.nodes.__iter__() - - def keys(self): - return self.nodes.keys() - - def values(self): - return self.nodes.values() - - def items(self): - return self.nodes.items() - - def get(self, key): - key = self._validate_name(key) - return self.nodes.get(key) - - def __contains__(self, key): - key = self._validate_name(key) - return key in self.nodes - - def find_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> dns.node.Node: - """Find a node in the zone, possibly creating it. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.node.Node``. - """ - - name = self._validate_name(name) - node = self.nodes.get(name) - if node is None: - if not create: - raise KeyError - node = self.node_factory() - self.nodes[name] = node - return node - - def get_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> Optional[dns.node.Node]: - """Get a node in the zone, possibly creating it. - - This method is like ``find_node()``, except it returns None instead - of raising an exception if the node does not exist and creation - has not been requested. - - *name*: the name of the node to find. 
- The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Returns a ``dns.node.Node`` or ``None``. - """ - - try: - node = self.find_node(name, create) - except KeyError: - node = None - return node - - def delete_node(self, name: Union[dns.name.Name, str]) -> None: - """Delete the specified node if it exists. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - It is not an error if the node does not exist. - """ - - name = self._validate_name(name) - if name in self.nodes: - del self.nodes[name] - - def find_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - """Look for an rdataset with the specified name and type in the zone, - and return an rdataset encapsulating it. - - The rdataset returned is not a copy; changes to it will change - the zone. - - KeyError is raised if the name or type are not found. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str`` the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rdataset.Rdataset``. - """ - - name = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - node = self.find_node(name, create) - return node.find_rdataset(self.rdclass, rdtype, covers, create) - - def get_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - """Look for an rdataset with the specified name and type in the zone. - - This method is like ``find_rdataset()``, except it returns None instead - of raising an exception if the rdataset does not exist and creation - has not been requested. - - The rdataset returned is not a copy; changes to it will change - the zone. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. 
- - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rdataset.Rdataset`` or ``None``. - """ - - try: - rdataset = self.find_rdataset(name, rdtype, covers, create) - except KeyError: - rdataset = None - return rdataset - - def delete_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> None: - """Delete the rdataset matching *rdtype* and *covers*, if it - exists at the node specified by *name*. - - It is not an error if the node does not exist, or if there is no matching - rdataset at the node. - - If the node has no rdatasets after the deletion, it will itself be deleted. - - *name*: the name of the node to find. The value may be a ``dns.name.Name`` or a - ``str``. If absolute, the name must be a subdomain of the zone's origin. If - ``zone.relativize`` is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str`` or ``None``, the covered - type. Usually this value is ``dns.rdatatype.NONE``, but if the rdtype is - ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, then the covers value will be - the rdata type the SIG/RRSIG covers. The library treats the SIG and RRSIG types - as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This - makes RRSIGs much easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - """ - - name = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - node = self.get_node(name) - if node is not None: - node.delete_rdataset(self.rdclass, rdtype, covers) - if len(node) == 0: - self.delete_node(name) - - def replace_rdataset( - self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset - ) -> None: - """Replace an rdataset at name. - - It is not an error if there is no rdataset matching I{replacement}. - - Ownership of the *replacement* object is transferred to the zone; - in other words, this method does not store a copy of *replacement* - at the node, it stores *replacement* itself. - - If the node does not exist, it is created. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset. 
- """ - - if replacement.rdclass != self.rdclass: - raise ValueError("replacement.rdclass != zone.rdclass") - node = self.find_node(name, True) - node.replace_rdataset(replacement) - - def find_rrset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> dns.rrset.RRset: - """Look for an rdataset with the specified name and type in the zone, - and return an RRset encapsulating it. - - This method is less efficient than the similar - ``find_rdataset()`` because it creates an RRset instead of - returning the matching rdataset. It may be more convenient - for some uses since it returns an object which binds the owner - name to the rdataset. - - This method may not be used to create new nodes or rdatasets; - use ``find_rdataset`` instead. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rrset.RRset`` or ``None``. - """ - - vname = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - rdataset = self.nodes[vname].find_rdataset(self.rdclass, rdtype, covers) - rrset = dns.rrset.RRset(vname, self.rdclass, rdtype, covers) - rrset.update(rdataset) - return rrset - - def get_rrset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> Optional[dns.rrset.RRset]: - """Look for an rdataset with the specified name and type in the zone, - and return an RRset encapsulating it. - - This method is less efficient than the similar ``get_rdataset()`` - because it creates an RRset instead of returning the matching - rdataset. It may be more convenient for some uses since it - returns an object which binds the owner name to the rdataset. - - This method may not be used to create new nodes or rdatasets; - use ``get_rdataset()`` instead. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. 
The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - Returns a ``dns.rrset.RRset`` or ``None``. - """ - - try: - rrset = self.find_rrset(name, rdtype, covers) - except KeyError: - rrset = None - return rrset - - def iterate_rdatasets( - self, - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY, - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]: - """Return a generator which yields (name, rdataset) tuples for - all rdatasets in the zone which have the specified *rdtype* - and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, - then all rdatasets will be matched. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - """ - - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - for name, node in self.items(): - for rds in node: - if rdtype == dns.rdatatype.ANY or ( - rds.rdtype == rdtype and rds.covers == covers - ): - yield (name, rds) - - def iterate_rdatas( - self, - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY, - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> Iterator[Tuple[dns.name.Name, int, dns.rdata.Rdata]]: - """Return a generator which yields (name, ttl, rdata) tuples for - all rdatas in the zone which have the specified *rdtype* - and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, - then all rdatas will be matched. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - """ - - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - for name, node in self.items(): - for rds in node: - if rdtype == dns.rdatatype.ANY or ( - rds.rdtype == rdtype and rds.covers == covers - ): - for rdata in rds: - yield (name, rds.ttl, rdata) - - def to_file( - self, - f: Any, - sorted: bool = True, - relativize: bool = True, - nl: Optional[str] = None, - want_comments: bool = False, - want_origin: bool = False, - ) -> None: - """Write a zone to a file. - - *f*, a file or ``str``. If *f* is a string, it is treated - as the name of a file to open. - - *sorted*, a ``bool``.
If True, the default, then the file - will be written with the names sorted in DNSSEC order from - least to greatest. Otherwise the names will be written in - whatever order they happen to have in the zone's dictionary. - - *relativize*, a ``bool``. If True, the default, then domain - names in the output will be relativized to the zone's origin - if possible. - - *nl*, a ``str`` or None. The end of line string. If ``None``, - the output will use the platform's native end-of-line marker - (i.e. LF on POSIX, CRLF on Windows). - - *want_comments*, a ``bool``. If ``True``, emit end-of-line comments - as part of writing the file. If ``False``, the default, do not - emit them. - - *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at - the start of the file. If ``False``, the default, do not emit - one. - """ - - if isinstance(f, str): - cm: contextlib.AbstractContextManager = open(f, "wb") - else: - cm = contextlib.nullcontext(f) - with cm as f: - # must be done this way: f.encoding may be None, or the - # attribute may not be there at all - file_enc = getattr(f, "encoding", None) - if file_enc is None: - file_enc = "utf-8" - - if nl is None: - # binary mode, '\n' is not enough - nl_b = os.linesep.encode(file_enc) - nl = "\n" - elif isinstance(nl, str): - nl_b = nl.encode(file_enc) - else: - nl_b = nl - nl = nl.decode() - - if want_origin: - assert self.origin is not None - l = "$ORIGIN " + self.origin.to_text() - l_b = l.encode(file_enc) - try: - f.write(l_b) - f.write(nl_b) - except TypeError: # textual mode - f.write(l) - f.write(nl) - - if sorted: - names = list(self.keys()) - names.sort() - else: - names = self.keys() - for n in names: - l = self[n].to_text( - n, - origin=self.origin, - relativize=relativize, - want_comments=want_comments, - ) - l_b = l.encode(file_enc) - - try: - f.write(l_b) - f.write(nl_b) - except TypeError: # textual mode - f.write(l) - f.write(nl) - - def to_text( - self, - sorted: bool = True, - relativize: bool = True, - nl: Optional[str] = None, - want_comments: bool = False, - want_origin: bool = False, - ) -> str: - """Return a zone's text as though it were written to a file. - - *sorted*, a ``bool``. If True, the default, then the file - will be written with the names sorted in DNSSEC order from - least to greatest. Otherwise the names will be written in - whatever order they happen to have in the zone's dictionary. - - *relativize*, a ``bool``. If True, the default, then domain - names in the output will be relativized to the zone's origin - if possible. - - *nl*, a ``str`` or None. The end of line string. If ``None``, - the output will use the platform's native end-of-line marker - (i.e. LF on POSIX, CRLF on Windows). - - *want_comments*, a ``bool``. If ``True``, emit end-of-line comments - as part of writing the file. If ``False``, the default, do not - emit them. - - *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at - the start of the output. If ``False``, the default, do not emit - one. - - Returns a ``str``. - """ - temp_buffer = io.StringIO() - self.to_file(temp_buffer, sorted, relativize, nl, want_comments, want_origin) - return_value = temp_buffer.getvalue() - temp_buffer.close() - return return_value - - def check_origin(self) -> None: - """Do some simple checking of the zone's origin. - - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Raises ``dns.zone.NoNS`` if there is no NS RRset. - - Raises ``KeyError`` if there is no origin node.
- """ - if self.relativize: - name = dns.name.empty - else: - assert self.origin is not None - name = self.origin - if self.get_rdataset(name, dns.rdatatype.SOA) is None: - raise NoSOA - if self.get_rdataset(name, dns.rdatatype.NS) is None: - raise NoNS - - def get_soa( - self, txn: Optional[dns.transaction.Transaction] = None - ) -> dns.rdtypes.ANY.SOA.SOA: - """Get the zone SOA rdata. - - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Returns a ``dns.rdtypes.ANY.SOA.SOA`` Rdata. - """ - if self.relativize: - origin_name = dns.name.empty - else: - if self.origin is None: - # get_soa() has been called very early, and there must not be - # an SOA if there is no origin. - raise NoSOA - origin_name = self.origin - soa: Optional[dns.rdataset.Rdataset] - if txn: - soa = txn.get(origin_name, dns.rdatatype.SOA) - else: - soa = self.get_rdataset(origin_name, dns.rdatatype.SOA) - if soa is None: - raise NoSOA - return soa[0] - - def _compute_digest( - self, - hash_algorithm: DigestHashAlgorithm, - scheme: DigestScheme = DigestScheme.SIMPLE, - ) -> bytes: - hashinfo = _digest_hashers.get(hash_algorithm) - if not hashinfo: - raise UnsupportedDigestHashAlgorithm - if scheme != DigestScheme.SIMPLE: - raise UnsupportedDigestScheme - - if self.relativize: - origin_name = dns.name.empty - else: - assert self.origin is not None - origin_name = self.origin - hasher = hashinfo() - for name, node in sorted(self.items()): - rrnamebuf = name.to_digestable(self.origin) - for rdataset in sorted(node, key=lambda rds: (rds.rdtype, rds.covers)): - if name == origin_name and dns.rdatatype.ZONEMD in ( - rdataset.rdtype, - rdataset.covers, - ): - continue - rrfixed = struct.pack( - "!HHI", rdataset.rdtype, rdataset.rdclass, rdataset.ttl - ) - rdatas = [rdata.to_digestable(self.origin) for rdata in rdataset] - for rdata in sorted(rdatas): - rrlen = struct.pack("!H", len(rdata)) - hasher.update(rrnamebuf + rrfixed + rrlen + rdata) - return hasher.digest() - - def compute_digest( - self, - hash_algorithm: DigestHashAlgorithm, - scheme: DigestScheme = DigestScheme.SIMPLE, - ) -> dns.rdtypes.ANY.ZONEMD.ZONEMD: - serial = self.get_soa().serial - digest = self._compute_digest(hash_algorithm, scheme) - return dns.rdtypes.ANY.ZONEMD.ZONEMD( - self.rdclass, dns.rdatatype.ZONEMD, serial, scheme, hash_algorithm, digest - ) - - def verify_digest( - self, zonemd: Optional[dns.rdtypes.ANY.ZONEMD.ZONEMD] = None - ) -> None: - digests: Union[dns.rdataset.Rdataset, List[dns.rdtypes.ANY.ZONEMD.ZONEMD]] - if zonemd: - digests = [zonemd] - else: - assert self.origin is not None - rds = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) - if rds is None: - raise NoDigest - digests = rds - for digest in digests: - try: - computed = self._compute_digest(digest.hash_algorithm, digest.scheme) - if computed == digest.digest: - return - except Exception: - pass - raise DigestVerificationFailure - - # TransactionManager methods - - def reader(self) -> "Transaction": - return Transaction(self, False, Version(self, 1, self.nodes, self.origin)) - - def writer(self, replacement: bool = False) -> "Transaction": - txn = Transaction(self, replacement) - txn._setup_version() - return txn - - def origin_information( - self, - ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: - effective: Optional[dns.name.Name] - if self.relativize: - effective = dns.name.empty - else: - effective = self.origin - return (self.origin, self.relativize, effective) - - def get_class(self): - return self.rdclass - - # Transaction methods - - def 
_end_read(self, txn): - pass - - def _end_write(self, txn): - pass - - def _commit_version(self, _, version, origin): - self.nodes = version.nodes - if self.origin is None: - self.origin = origin - - def _get_next_version_id(self): - # Versions are ephemeral and all have id 1 - return 1 - - -# These classes used to be in dns.versioned, but have moved here so we can use -# the copy-on-write transaction mechanism for both kinds of zones. In a -# regular zone, the version only exists during the transaction, and the nodes -# are regular dns.node.Nodes. - -# A node with a version id. - - -class VersionedNode(dns.node.Node): # lgtm[py/missing-equals] - __slots__ = ["id"] - - def __init__(self): - super().__init__() - # A proper id will get set by the Version - self.id = 0 - - -@dns.immutable.immutable -class ImmutableVersionedNode(VersionedNode): - def __init__(self, node): - super().__init__() - self.id = node.id - self.rdatasets = tuple( - [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] - ) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise TypeError("immutable") - return super().find_rdataset(rdclass, rdtype, covers, False) - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise TypeError("immutable") - return super().get_rdataset(rdclass, rdtype, covers, False) - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - raise TypeError("immutable") - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - raise TypeError("immutable") - - def is_immutable(self) -> bool: - return True - - -class Version: - def __init__( - self, - zone: Zone, - id: int, - nodes: Optional[MutableMapping[dns.name.Name, dns.node.Node]] = None, - origin: Optional[dns.name.Name] = None, - ): - self.zone = zone - self.id = id - if nodes is not None: - self.nodes = nodes - else: - self.nodes = zone.map_factory() - self.origin = origin - - def _validate_name(self, name: dns.name.Name) -> dns.name.Name: - return _validate_name(name, self.origin, self.zone.relativize) - - def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: - name = self._validate_name(name) - return self.nodes.get(name) - - def get_rdataset( - self, - name: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> Optional[dns.rdataset.Rdataset]: - node = self.get_node(name) - if node is None: - return None - return node.get_rdataset(self.zone.rdclass, rdtype, covers) - - def keys(self): - return self.nodes.keys() - - def items(self): - return self.nodes.items() - - -class WritableVersion(Version): - def __init__(self, zone: Zone, replacement: bool = False): - # The zone._versions_lock must be held by our caller in a versioned - # zone. - id = zone._get_next_version_id() - super().__init__(zone, id) - if not replacement: - # We copy the map, because that gives us a simple and thread-safe - # way of doing versions, and we have a garbage collector to help - # us. We only make new node objects if we actually change the - # node. 
- self.nodes.update(zone.nodes) - # We have to copy the zone origin as it may be None in the first - # version, and we don't want to mutate the zone until we commit. - self.origin = zone.origin - self.changed: Set[dns.name.Name] = set() - - def _maybe_cow(self, name: dns.name.Name) -> dns.node.Node: - name = self._validate_name(name) - node = self.nodes.get(name) - if node is None or name not in self.changed: - new_node = self.zone.node_factory() - if hasattr(new_node, "id"): - # We keep doing this for backwards compatibility, as earlier - # code used new_node.id != self.id for the "do we need to CoW?" - # test. Now we use the changed set as this works with both - # regular zones and versioned zones. - # - # We ignore the mypy error as this is safe but it doesn't see it. - new_node.id = self.id # type: ignore - if node is not None: - # moo! copy on write! - new_node.rdatasets.extend(node.rdatasets) - self.nodes[name] = new_node - self.changed.add(name) - return new_node - else: - return node - - def delete_node(self, name: dns.name.Name) -> None: - name = self._validate_name(name) - if name in self.nodes: - del self.nodes[name] - self.changed.add(name) - - def put_rdataset( - self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset - ) -> None: - node = self._maybe_cow(name) - node.replace_rdataset(rdataset) - - def delete_rdataset( - self, - name: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> None: - node = self._maybe_cow(name) - node.delete_rdataset(self.zone.rdclass, rdtype, covers) - if len(node) == 0: - del self.nodes[name] - - -@dns.immutable.immutable -class ImmutableVersion(Version): - def __init__(self, version: WritableVersion): - # We tell super() that it's a replacement as we don't want it - # to copy the nodes, as we're about to do that with an - # immutable Dict. - super().__init__(version.zone, True) - # set the right id! - self.id = version.id - # keep the origin - self.origin = version.origin - # Make changed nodes immutable - for name in version.changed: - node = version.nodes.get(name) - # it might not exist if we deleted it in the version - if node: - version.nodes[name] = ImmutableVersionedNode(node) - # We're changing the type of the nodes dictionary here on purpose, so - # we ignore the mypy error. 
- self.nodes = dns.immutable.Dict( - version.nodes, True, self.zone.map_factory - ) # type: ignore - - -class Transaction(dns.transaction.Transaction): - def __init__(self, zone, replacement, version=None, make_immutable=False): - read_only = version is not None - super().__init__(zone, replacement, read_only) - self.version = version - self.make_immutable = make_immutable - - @property - def zone(self): - return self.manager - - def _setup_version(self): - assert self.version is None - factory = self.manager.writable_version_factory - if factory is None: - factory = WritableVersion - self.version = factory(self.zone, self.replacement) - - def _get_rdataset(self, name, rdtype, covers): - return self.version.get_rdataset(name, rdtype, covers) - - def _put_rdataset(self, name, rdataset): - assert not self.read_only - self.version.put_rdataset(name, rdataset) - - def _delete_name(self, name): - assert not self.read_only - self.version.delete_node(name) - - def _delete_rdataset(self, name, rdtype, covers): - assert not self.read_only - self.version.delete_rdataset(name, rdtype, covers) - - def _name_exists(self, name): - return self.version.get_node(name) is not None - - def _changed(self): - if self.read_only: - return False - else: - return len(self.version.changed) > 0 - - def _end_transaction(self, commit): - if self.read_only: - self.zone._end_read(self) - elif commit and len(self.version.changed) > 0: - if self.make_immutable: - factory = self.manager.immutable_version_factory - if factory is None: - factory = ImmutableVersion - version = factory(self.version) - else: - version = self.version - self.zone._commit_version(self, version, self.version.origin) - else: - # rollback - self.zone._end_write(self) - - def _set_origin(self, origin): - if self.version.origin is None: - self.version.origin = origin - - def _iterate_rdatasets(self): - for name, node in self.version.items(): - for rdataset in node: - yield (name, rdataset) - - def _iterate_names(self): - return self.version.keys() - - def _get_node(self, name): - return self.version.get_node(name) - - def _origin_information(self): - (absolute, relativize, effective) = self.manager.origin_information() - if absolute is None and self.version.origin is not None: - # No origin has been committed yet, but we've learned one as part of - # this txn. Use it. - absolute = self.version.origin - if relativize: - effective = dns.name.empty - else: - effective = absolute - return (absolute, relativize, effective) - - -def _from_text( - text: Any, - origin: Optional[Union[dns.name.Name, str]] = None, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - relativize: bool = True, - zone_factory: Any = Zone, - filename: Optional[str] = None, - allow_include: bool = False, - check_origin: bool = True, - idna_codec: Optional[dns.name.IDNACodec] = None, - allow_directives: Union[bool, Iterable[str]] = True, -) -> Zone: - # See the comments for the public APIs from_text() and from_file() for - # details. - - # 'text' can also be a file, but we don't publish that fact - # since it's an implementation detail. The official file - # interface is from_file(). 
- - if filename is None: - filename = "<string>" - zone = zone_factory(origin, rdclass, relativize=relativize) - with zone.writer(True) as txn: - tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec) - reader = dns.zonefile.Reader( - tok, - rdclass, - txn, - allow_include=allow_include, - allow_directives=allow_directives, - ) - try: - reader.read() - except dns.zonefile.UnknownOrigin: - # for backwards compatibility - raise dns.zone.UnknownOrigin - # Now that we're done reading, do some basic checking of the zone. - if check_origin: - zone.check_origin() - return zone - - - def from_text( - text: str, - origin: Optional[Union[dns.name.Name, str]] = None, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - relativize: bool = True, - zone_factory: Any = Zone, - filename: Optional[str] = None, - allow_include: bool = False, - check_origin: bool = True, - idna_codec: Optional[dns.name.IDNACodec] = None, - allow_directives: Union[bool, Iterable[str]] = True, - ) -> Zone: - """Build a zone object from a zone file format string. - - *text*, a ``str``, the zone file format input. - - *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin - of the zone; if not specified, the first ``$ORIGIN`` statement in the - zone file will determine the origin of the zone. - - *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the default is - class IN. - - *relativize*, a ``bool``, determines whether domain names are - relativized to the zone's origin. The default is ``True``. - - *zone_factory*, the zone factory to use or ``None``. If ``None``, then - ``dns.zone.Zone`` will be used. The value may be any class or callable - that returns a subclass of ``dns.zone.Zone``. - - *filename*, a ``str`` or ``None``, the filename to emit when - describing where an error occurred; the default is ``'<string>'``. - - *allow_include*, a ``bool``. If ``True``, then ``$INCLUDE`` - directives are permitted. If ``False``, the default, then encountering - a ``$INCLUDE`` will raise a ``SyntaxError`` exception. - - *check_origin*, a ``bool``. If ``True``, the default, then sanity - checks of the origin node will be made by calling the zone's - ``check_origin()`` method. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default, - then directives are permitted, and the *allow_include* parameter controls whether - ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive - processing is done and any directive-like text will be treated as a regular owner - name. If a non-empty iterable, then only the listed directives (including the - ``$``) are allowed. - - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Raises ``dns.zone.NoNS`` if there is no NS RRset. - - Raises ``KeyError`` if there is no origin node. - - Returns a subclass of ``dns.zone.Zone``.
- """ - return _from_text( - text, - origin, - rdclass, - relativize, - zone_factory, - filename, - allow_include, - check_origin, - idna_codec, - allow_directives, - ) - - -def from_file( - f: Any, - origin: Optional[Union[dns.name.Name, str]] = None, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - relativize: bool = True, - zone_factory: Any = Zone, - filename: Optional[str] = None, - allow_include: bool = True, - check_origin: bool = True, - idna_codec: Optional[dns.name.IDNACodec] = None, - allow_directives: Union[bool, Iterable[str]] = True, -) -> Zone: - """Read a zone file and build a zone object. - - *f*, a file or ``str``. If *f* is a string, it is treated - as the name of a file to open. - - *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin - of the zone; if not specified, the first ``$ORIGIN`` statement in the - zone file will determine the origin of the zone. - - *rdclass*, an ``int``, the zone's rdata class; the default is class IN. - - *relativize*, a ``bool``, determine's whether domain names are - relativized to the zone's origin. The default is ``True``. - - *zone_factory*, the zone factory to use or ``None``. If ``None``, then - ``dns.zone.Zone`` will be used. The value may be any class or callable - that returns a subclass of ``dns.zone.Zone``. - - *filename*, a ``str`` or ``None``, the filename to emit when - describing where an error occurred; the default is ``''``. - - *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE`` - directives are permitted. If ``False``, then encoutering a ``$INCLUDE`` - will raise a ``SyntaxError`` exception. - - *check_origin*, a ``bool``. If ``True``, the default, then sanity - checks of the origin node will be made by calling the zone's - ``check_origin()`` method. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default, - then directives are permitted, and the *allow_include* parameter controls whether - ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive - processing is done and any directive-like text will be treated as a regular owner - name. If a non-empty iterable, then only the listed directives (including the - ``$``) are allowed. - - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Raises ``dns.zone.NoNS`` if there is no NS RRset. - - Raises ``KeyError`` if there is no origin node. - - Returns a subclass of ``dns.zone.Zone``. - """ - - if isinstance(f, str): - if filename is None: - filename = f - cm: contextlib.AbstractContextManager = open(f) - else: - cm = contextlib.nullcontext(f) - with cm as f: - return _from_text( - f, - origin, - rdclass, - relativize, - zone_factory, - filename, - allow_include, - check_origin, - idna_codec, - allow_directives, - ) - assert False # make mypy happy lgtm[py/unreachable-statement] - - -def from_xfr( - xfr: Any, - zone_factory: Any = Zone, - relativize: bool = True, - check_origin: bool = True, -) -> Zone: - """Convert the output of a zone transfer generator into a zone object. - - *xfr*, a generator of ``dns.message.Message`` objects, typically - ``dns.query.xfr()``. - - *relativize*, a ``bool``, determine's whether domain names are - relativized to the zone's origin. The default is ``True``. - It is essential that the relativize setting matches the one specified - to the generator. - - *check_origin*, a ``bool``. 
If ``True``, the default, then sanity - checks of the origin node will be made by calling the zone's - ``check_origin()`` method. - - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Raises ``dns.zone.NoNS`` if there is no NS RRset. - - Raises ``KeyError`` if there is no origin node. - - Raises ``ValueError`` if no messages are yielded by the generator. - - Returns a subclass of ``dns.zone.Zone``. - """ - - z = None - for r in xfr: - if z is None: - if relativize: - origin = r.origin - else: - origin = r.answer[0].name - rdclass = r.answer[0].rdclass - z = zone_factory(origin, rdclass, relativize=relativize) - for rrset in r.answer: - znode = z.nodes.get(rrset.name) - if not znode: - znode = z.node_factory() - z.nodes[rrset.name] = znode - zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, rrset.covers, True) - zrds.update_ttl(rrset.ttl) - for rd in rrset: - zrds.add(rd) - if z is None: - raise ValueError("empty transfer") - if check_origin: - z.check_origin() - return z diff --git a/venv/lib/python3.12/site-packages/dns/zonefile.py b/venv/lib/python3.12/site-packages/dns/zonefile.py deleted file mode 100644 index d74510b2..00000000 --- a/venv/lib/python3.12/site-packages/dns/zonefile.py +++ /dev/null @@ -1,744 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Zones.""" - -import re -import sys -from typing import Any, Iterable, List, Optional, Set, Tuple, Union - -import dns.exception -import dns.grange -import dns.name -import dns.node -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.ANY.SOA -import dns.rrset -import dns.tokenizer -import dns.transaction -import dns.ttl - - -class UnknownOrigin(dns.exception.DNSException): - """Unknown origin""" - - -class CNAMEAndOtherData(dns.exception.DNSException): - """A node has a CNAME and other data""" - - -def _check_cname_and_other_data(txn, name, rdataset): - rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) - node = txn.get_node(name) - if node is None: - # empty nodes are neutral. 
- return - node_kind = node.classify() - if ( - node_kind == dns.node.NodeKind.CNAME - and rdataset_kind == dns.node.NodeKind.REGULAR - ): - raise CNAMEAndOtherData("rdataset type is not compatible with a CNAME node") - elif ( - node_kind == dns.node.NodeKind.REGULAR - and rdataset_kind == dns.node.NodeKind.CNAME - ): - raise CNAMEAndOtherData( - "CNAME rdataset is not compatible with a regular data node" - ) - # Otherwise at least one of the node and the rdataset is neutral, so - # adding the rdataset is ok - - -SavedStateType = Tuple[ - dns.tokenizer.Tokenizer, - Optional[dns.name.Name], # current_origin - Optional[dns.name.Name], # last_name - Optional[Any], # current_file - int, # last_ttl - bool, # last_ttl_known - int, # default_ttl - bool, -] # default_ttl_known - - -def _upper_dollarize(s): - s = s.upper() - if not s.startswith("$"): - s = "$" + s - return s - - -class Reader: - """Read a DNS zone file into a transaction.""" - - def __init__( - self, - tok: dns.tokenizer.Tokenizer, - rdclass: dns.rdataclass.RdataClass, - txn: dns.transaction.Transaction, - allow_include: bool = False, - allow_directives: Union[bool, Iterable[str]] = True, - force_name: Optional[dns.name.Name] = None, - force_ttl: Optional[int] = None, - force_rdclass: Optional[dns.rdataclass.RdataClass] = None, - force_rdtype: Optional[dns.rdatatype.RdataType] = None, - default_ttl: Optional[int] = None, - ): - self.tok = tok - (self.zone_origin, self.relativize, _) = txn.manager.origin_information() - self.current_origin = self.zone_origin - self.last_ttl = 0 - self.last_ttl_known = False - if force_ttl is not None: - default_ttl = force_ttl - if default_ttl is None: - self.default_ttl = 0 - self.default_ttl_known = False - else: - self.default_ttl = default_ttl - self.default_ttl_known = True - self.last_name = self.current_origin - self.zone_rdclass = rdclass - self.txn = txn - self.saved_state: List[SavedStateType] = [] - self.current_file: Optional[Any] = None - self.allowed_directives: Set[str] - if allow_directives is True: - self.allowed_directives = {"$GENERATE", "$ORIGIN", "$TTL"} - if allow_include: - self.allowed_directives.add("$INCLUDE") - elif allow_directives is False: - # allow_include was ignored in earlier releases if allow_directives was - # False, so we continue that. - self.allowed_directives = set() - else: - # Note that if directives are explicitly specified, then allow_include - # is ignored. - self.allowed_directives = set(_upper_dollarize(d) for d in allow_directives) - self.force_name = force_name - self.force_ttl = force_ttl - self.force_rdclass = force_rdclass - self.force_rdtype = force_rdtype - self.txn.check_put_rdataset(_check_cname_and_other_data) - - def _eat_line(self): - while 1: - token = self.tok.get() - if token.is_eol_or_eof(): - break - - def _get_identifier(self): - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - return token - - def _rr_line(self): - """Process one line from a DNS zone file.""" - token = None - # Name - if self.force_name is not None: - name = self.force_name - else: - if self.current_origin is None: - raise UnknownOrigin - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name(token, self.current_origin) - else: - token = self.tok.get() - if token.is_eol_or_eof(): - # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
- return - self.tok.unget(token) - name = self.last_name - if not name.is_subdomain(self.zone_origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone_origin) - - # TTL - if self.force_ttl is not None: - ttl = self.force_ttl - self.last_ttl = ttl - self.last_ttl_known = True - else: - token = self._get_identifier() - ttl = None - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = None - except dns.ttl.BadTTL: - self.tok.unget(token) - - # Class - if self.force_rdclass is not None: - rdclass = self.force_rdclass - else: - token = self._get_identifier() - try: - rdclass = dns.rdataclass.from_text(token.value) - except dns.exception.SyntaxError: - raise - except Exception: - rdclass = self.zone_rdclass - self.tok.unget(token) - if rdclass != self.zone_rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - - if ttl is None: - # support for <class> <ttl> syntax - token = self._get_identifier() - ttl = None - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = None - except dns.ttl.BadTTL: - if self.default_ttl_known: - ttl = self.default_ttl - elif self.last_ttl_known: - ttl = self.last_ttl - self.tok.unget(token) - - # Type - if self.force_rdtype is not None: - rdtype = self.force_rdtype - else: - token = self._get_identifier() - try: - rdtype = dns.rdatatype.from_text(token.value) - except Exception: - raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'") - - try: - rd = dns.rdata.from_text( - rdclass, - rdtype, - self.tok, - self.current_origin, - self.relativize, - self.zone_origin, - ) - except dns.exception.SyntaxError: - # Catch and reraise. - raise - except Exception: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError(f"caught exception {str(ty)}: {str(va)}") - - if not self.default_ttl_known and rdtype == dns.rdatatype.SOA: - # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default - # TTL from the SOA minttl if no $TTL statement is present before the - # SOA is parsed. - self.default_ttl = rd.minimum - self.default_ttl_known = True - if ttl is None: - # if we didn't have a TTL on the SOA, set it! - ttl = rd.minimum - - # TTL check. We had to wait until now to do this as the SOA RR's - # own TTL can be inferred from its minimum. - if ttl is None: - raise dns.exception.SyntaxError("Missing default TTL value") - - self.txn.add(name, ttl, rd) - - def _parse_modify(self, side: str) -> Tuple[str, str, int, int, str]: - # Here we catch everything in '{' '}' in a group so we can replace it - # with ''. - is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$") - is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$") - is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$") - # Sometimes there are modifiers in the hostname. These come after - # the dollar sign. They are in the form: ${offset[,width[,base]]}.
- # Make names - mod = "" - sign = "+" - offset = "0" - width = "0" - base = "d" - g1 = is_generate1.match(side) - if g1: - mod, sign, offset, width, base = g1.groups() - if sign == "": - sign = "+" - else: - g2 = is_generate2.match(side) - if g2: - mod, sign, offset = g2.groups() - if sign == "": - sign = "+" - width = "0" - base = "d" - else: - g3 = is_generate3.match(side) - if g3: - mod, sign, offset, width = g3.groups() - if sign == "": - sign = "+" - base = "d" - - ioffset = int(offset) - iwidth = int(width) - - if sign not in ["+", "-"]: - raise dns.exception.SyntaxError(f"invalid offset sign {sign}") - if base not in ["d", "o", "x", "X", "n", "N"]: - raise dns.exception.SyntaxError(f"invalid type {base}") - - return mod, sign, ioffset, iwidth, base - - def _generate_line(self): - # range lhs [ttl] [class] type rhs [ comment ] - """Process one line containing the GENERATE statement from a DNS - zone file.""" - if self.current_origin is None: - raise UnknownOrigin - - token = self.tok.get() - # Range (required) - try: - start, stop, step = dns.grange.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError - - # lhs (required) - try: - lhs = token.value - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError - - # TTL - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.ttl.BadTTL: - if not (self.last_ttl_known or self.default_ttl_known): - raise dns.exception.SyntaxError("Missing default TTL value") - if self.default_ttl_known: - ttl = self.default_ttl - elif self.last_ttl_known: - ttl = self.last_ttl - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = self.zone_rdclass - if rdclass != self.zone_rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - # Type - try: - rdtype = dns.rdatatype.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'") - - # rhs (required) - rhs = token.value - - def _calculate_index(counter: int, offset_sign: str, offset: int) -> int: - """Calculate the index from the counter and offset.""" - if offset_sign == "-": - offset *= -1 - return counter + offset - - def _format_index(index: int, base: str, width: int) -> str: - """Format the index with the given base, and zero-fill it - to the given width.""" - if base in ["d", "o", "x", "X"]: - return format(index, base).zfill(width) - - # base can only be n or N here - hexa = _format_index(index, "x", width) - nibbles = ".".join(hexa[::-1])[:width] - if base == "N": - nibbles = nibbles.upper() - return nibbles - - lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) - rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) - for i in range(start, stop + 1, step): - # +1 because bind is inclusive and python is exclusive - - lindex = _calculate_index(i, lsign, loffset) - rindex = _calculate_index(i, rsign, roffset) - - lzfindex = _format_index(lindex, lbase, lwidth) - rzfindex = 
_format_index(rindex, rbase, rwidth) - - name = lhs.replace(f"${lmod}", lzfindex) - rdata = rhs.replace(f"${rmod}", rzfindex) - - self.last_name = dns.name.from_text( - name, self.current_origin, self.tok.idna_codec - ) - name = self.last_name - if not name.is_subdomain(self.zone_origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone_origin) - - try: - rd = dns.rdata.from_text( - rdclass, - rdtype, - rdata, - self.current_origin, - self.relativize, - self.zone_origin, - ) - except dns.exception.SyntaxError: - # Catch and reraise. - raise - except Exception: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError( - f"caught exception {str(ty)}: {str(va)}" - ) - - self.txn.add(name, ttl, rd) - - def read(self) -> None: - """Read a DNS zone file and build a zone object. - - @raises dns.zone.NoSOA: No SOA RR was found at the zone origin - @raises dns.zone.NoNS: No NS RRset was found at the zone origin - """ - - try: - while 1: - token = self.tok.get(True, True) - if token.is_eof(): - if self.current_file is not None: - self.current_file.close() - if len(self.saved_state) > 0: - ( - self.tok, - self.current_origin, - self.last_name, - self.current_file, - self.last_ttl, - self.last_ttl_known, - self.default_ttl, - self.default_ttl_known, - ) = self.saved_state.pop(-1) - continue - break - elif token.is_eol(): - continue - elif token.is_comment(): - self.tok.get_eol() - continue - elif token.value[0] == "$" and len(self.allowed_directives) > 0: - # Note that we only run directive processing code if at least - # one directive is allowed in order to be backwards compatible - c = token.value.upper() - if c not in self.allowed_directives: - raise dns.exception.SyntaxError( - f"zone file directive '{c}' is not allowed" - ) - if c == "$TTL": - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError("bad $TTL") - self.default_ttl = dns.ttl.from_text(token.value) - self.default_ttl_known = True - self.tok.get_eol() - elif c == "$ORIGIN": - self.current_origin = self.tok.get_name() - self.tok.get_eol() - if self.zone_origin is None: - self.zone_origin = self.current_origin - self.txn._set_origin(self.current_origin) - elif c == "$INCLUDE": - token = self.tok.get() - filename = token.value - token = self.tok.get() - new_origin: Optional[dns.name.Name] - if token.is_identifier(): - new_origin = dns.name.from_text( - token.value, self.current_origin, self.tok.idna_codec - ) - self.tok.get_eol() - elif not token.is_eol_or_eof(): - raise dns.exception.SyntaxError("bad origin in $INCLUDE") - else: - new_origin = self.current_origin - self.saved_state.append( - ( - self.tok, - self.current_origin, - self.last_name, - self.current_file, - self.last_ttl, - self.last_ttl_known, - self.default_ttl, - self.default_ttl_known, - ) - ) - self.current_file = open(filename) - self.tok = dns.tokenizer.Tokenizer(self.current_file, filename) - self.current_origin = new_origin - elif c == "$GENERATE": - self._generate_line() - else: - raise dns.exception.SyntaxError( - f"Unknown zone file directive '{c}'" - ) - continue - self.tok.unget(token) - self._rr_line() - except dns.exception.SyntaxError as detail: - (filename, line_number) = self.tok.where() - if detail is None: - detail = "syntax 
error" - ex = dns.exception.SyntaxError( - "%s:%d: %s" % (filename, line_number, detail) - ) - tb = sys.exc_info()[2] - raise ex.with_traceback(tb) from None - - -class RRsetsReaderTransaction(dns.transaction.Transaction): - def __init__(self, manager, replacement, read_only): - assert not read_only - super().__init__(manager, replacement, read_only) - self.rdatasets = {} - - def _get_rdataset(self, name, rdtype, covers): - return self.rdatasets.get((name, rdtype, covers)) - - def _get_node(self, name): - rdatasets = [] - for (rdataset_name, _, _), rdataset in self.rdatasets.items(): - if name == rdataset_name: - rdatasets.append(rdataset) - if len(rdatasets) == 0: - return None - node = dns.node.Node() - node.rdatasets = rdatasets - return node - - def _put_rdataset(self, name, rdataset): - self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset - - def _delete_name(self, name): - # First remove any changes involving the name - remove = [] - for key in self.rdatasets: - if key[0] == name: - remove.append(key) - if len(remove) > 0: - for key in remove: - del self.rdatasets[key] - - def _delete_rdataset(self, name, rdtype, covers): - try: - del self.rdatasets[(name, rdtype, covers)] - except KeyError: - pass - - def _name_exists(self, name): - for n, _, _ in self.rdatasets: - if n == name: - return True - return False - - def _changed(self): - return len(self.rdatasets) > 0 - - def _end_transaction(self, commit): - if commit and self._changed(): - rrsets = [] - for (name, _, _), rdataset in self.rdatasets.items(): - rrset = dns.rrset.RRset( - name, rdataset.rdclass, rdataset.rdtype, rdataset.covers - ) - rrset.update(rdataset) - rrsets.append(rrset) - self.manager.set_rrsets(rrsets) - - def _set_origin(self, origin): - pass - - def _iterate_rdatasets(self): - raise NotImplementedError # pragma: no cover - - def _iterate_names(self): - raise NotImplementedError # pragma: no cover - - -class RRSetsReaderManager(dns.transaction.TransactionManager): - def __init__( - self, origin=dns.name.root, relativize=False, rdclass=dns.rdataclass.IN - ): - self.origin = origin - self.relativize = relativize - self.rdclass = rdclass - self.rrsets = [] - - def reader(self): # pragma: no cover - raise NotImplementedError - - def writer(self, replacement=False): - assert replacement is True - return RRsetsReaderTransaction(self, True, False) - - def get_class(self): - return self.rdclass - - def origin_information(self): - if self.relativize: - effective = dns.name.empty - else: - effective = self.origin - return (self.origin, self.relativize, effective) - - def set_rrsets(self, rrsets): - self.rrsets = rrsets - - -def read_rrsets( - text: Any, - name: Optional[Union[dns.name.Name, str]] = None, - ttl: Optional[int] = None, - rdclass: Optional[Union[dns.rdataclass.RdataClass, str]] = dns.rdataclass.IN, - default_rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, - default_ttl: Optional[Union[int, str]] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, - origin: Optional[Union[dns.name.Name, str]] = dns.name.root, - relativize: bool = False, -) -> List[dns.rrset.RRset]: - """Read one or more rrsets from the specified text, possibly subject - to restrictions. - - *text*, a file object or a string, is the input to process. - - *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of - the rrset. If not ``None``, then the owner name is "forced", and the - input must not specify an owner name. 
If ``None``, then any owner names - are allowed and must be present in the input. - - *ttl*, an ``int``, string, or None. If not ``None``, then the TTL is - forced to be the specified value and the input must not specify a TTL. - If ``None``, then a TTL may be specified in the input. If it is not - specified, then the *default_ttl* will be used. - - *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``. If - not ``None``, then the class is forced to the specified value, and the - input must not specify a class. If ``None``, then the input may specify - a class that matches *default_rdclass*. Note that it is not possible to - return rrsets with differing classes; specifying ``None`` for the class - simply allows the user to optionally type a class as that may be convenient - when cutting and pasting. - - *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string. The class - of the returned rrsets. - - *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``. If not - ``None``, then the type is forced to the specified value, and the - input must not specify a type. If ``None``, then a type must be present - for each RR. - - *default_ttl*, an ``int``, string, or ``None``. If not ``None``, then if - the TTL is not forced and is not specified, then this value will be used. - If ``None``, then if the TTL is not forced, an error will occur if the TTL - is not specified. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. Note that codecs only apply to the owner name; dnspython does - not do IDNA for names in rdata, as there is no IDNA zonefile format. - - *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any - relative names in the input, and also the origin to relativize to if - *relativize* is ``True``. - - *relativize*, a bool. If ``True``, names are relativized to the *origin*; - if ``False`` then any relative names in the input are made absolute by - appending the *origin*. - """ - if isinstance(origin, str): - origin = dns.name.from_text(origin, dns.name.root, idna_codec) - if isinstance(name, str): - name = dns.name.from_text(name, origin, idna_codec) - if isinstance(ttl, str): - ttl = dns.ttl.from_text(ttl) - if isinstance(default_ttl, str): - default_ttl = dns.ttl.from_text(default_ttl) - if rdclass is not None: - rdclass = dns.rdataclass.RdataClass.make(rdclass) - else: - rdclass = None - default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass) - if rdtype is not None: - rdtype = dns.rdatatype.RdataType.make(rdtype) - else: - rdtype = None - manager = RRSetsReaderManager(origin, relativize, default_rdclass) - with manager.writer(True) as txn: - tok = dns.tokenizer.Tokenizer(text, "<string>", idna_codec=idna_codec) - reader = Reader( - tok, - default_rdclass, - txn, - allow_directives=False, - force_name=name, - force_ttl=ttl, - force_rdclass=rdclass, - force_rdtype=rdtype, - default_ttl=default_ttl, - ) - reader.read() - return manager.rrsets diff --git a/venv/lib/python3.12/site-packages/dns/zonetypes.py b/venv/lib/python3.12/site-packages/dns/zonetypes.py deleted file mode 100644 index 195ee2ec..00000000 --- a/venv/lib/python3.12/site-packages/dns/zonetypes.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""Common zone-related types.""" - -# This is a separate file to avoid import circularity between dns.zone and -# the implementation of the ZONEMD type.
- - import hashlib - - import dns.enum - - - class DigestScheme(dns.enum.IntEnum): - """ZONEMD Scheme""" - - SIMPLE = 1 - - @classmethod - def _maximum(cls): - return 255 - - - class DigestHashAlgorithm(dns.enum.IntEnum): - """ZONEMD Hash Algorithm""" - - SHA384 = 1 - SHA512 = 2 - - @classmethod - def _maximum(cls): - return 255 - - - _digest_hashers = { - DigestHashAlgorithm.SHA384: hashlib.sha384, - DigestHashAlgorithm.SHA512: hashlib.sha512, - } diff --git a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/METADATA deleted file mode 100644 index ca4a4f4a..00000000 --- a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/METADATA +++ /dev/null @@ -1,149 +0,0 @@ -Metadata-Version: 2.3 -Name: dnspython -Version: 2.7.0 -Summary: DNS toolkit -Project-URL: homepage, https://www.dnspython.org -Project-URL: repository, https://github.com/rthalley/dnspython.git -Project-URL: documentation, https://dnspython.readthedocs.io/en/stable/ -Project-URL: issues, https://github.com/rthalley/dnspython/issues -Author-email: Bob Halley <halley@dnspython.org> -License: ISC -License-File: LICENSE -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: ISC License (ISCL) -Classifier: Operating System :: Microsoft :: Windows -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.9 -Provides-Extra: dev -Requires-Dist: black>=23.1.0; extra == 'dev' -Requires-Dist: coverage>=7.0; extra == 'dev' -Requires-Dist: flake8>=7; extra == 'dev' -Requires-Dist: hypercorn>=0.16.0; extra == 'dev' -Requires-Dist: mypy>=1.8; extra == 'dev' -Requires-Dist: pylint>=3; extra == 'dev' -Requires-Dist: pytest-cov>=4.1.0; extra == 'dev' -Requires-Dist: pytest>=7.4; extra == 'dev' -Requires-Dist: quart-trio>=0.11.0; extra == 'dev' -Requires-Dist: sphinx-rtd-theme>=2.0.0; extra == 'dev' -Requires-Dist: sphinx>=7.2.0; extra == 'dev' -Requires-Dist: twine>=4.0.0; extra == 'dev' -Requires-Dist: wheel>=0.42.0; extra == 'dev' -Provides-Extra: dnssec -Requires-Dist: cryptography>=43; extra == 'dnssec' -Provides-Extra: doh -Requires-Dist: h2>=4.1.0; extra == 'doh' -Requires-Dist: httpcore>=1.0.0; extra == 'doh' -Requires-Dist: httpx>=0.26.0; extra == 'doh' -Provides-Extra: doq -Requires-Dist: aioquic>=1.0.0; extra == 'doq' -Provides-Extra: idna -Requires-Dist: idna>=3.7; extra == 'idna' -Provides-Extra: trio -Requires-Dist: trio>=0.23; extra == 'trio' -Provides-Extra: wmi -Requires-Dist: wmi>=1.5.1; extra == 'wmi' -Description-Content-Type: text/markdown - -# dnspython - -[![Build
Status](https://github.com/rthalley/dnspython/actions/workflows/ci.yml/badge.svg)](https://github.com/rthalley/dnspython/actions/) -[![Documentation Status](https://readthedocs.org/projects/dnspython/badge/?version=latest)](https://dnspython.readthedocs.io/en/latest/?badge=latest) -[![PyPI version](https://badge.fury.io/py/dnspython.svg)](https://badge.fury.io/py/dnspython) -[![License: ISC](https://img.shields.io/badge/License-ISC-brightgreen.svg)](https://opensource.org/licenses/ISC) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) - -## INTRODUCTION - -dnspython is a DNS toolkit for Python. It supports almost all record types. It -can be used for queries, zone transfers, and dynamic updates. It supports TSIG -authenticated messages and EDNS0. - -dnspython provides both high and low level access to DNS. The high level classes -perform queries for data of a given name, type, and class, and return an answer -set. The low level classes allow direct manipulation of DNS zones, messages, -names, and records. - -To see a few of the ways dnspython can be used, look in the `examples/` -directory. - -dnspython is a utility to work with DNS; `/etc/hosts` is thus not used. For -simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or -`socket.gethostbyname()`. - -dnspython originated at Nominum where it was developed -to facilitate the testing of DNS software. - -## ABOUT THIS RELEASE - -This is dnspython 2.7.0. -Please read -[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for -information about the changes in this release. - -## INSTALLATION - -* Many distributions have dnspython packaged for you, so you should - check there first. -* To use a wheel downloaded from PyPI, run: - - pip install dnspython - -* To install from the source code, go into the top-level of the source code - and run: - -``` - pip install --upgrade pip build - python -m build - pip install dist/*.whl -``` - -* To install the latest from the main branch, run `pip install git+https://github.com/rthalley/dnspython.git` - -Dnspython's default installation does not depend on any modules other than -those in the Python standard library. To use some features, additional modules -must be installed. For convenience, pip options are defined for the -requirements. - -If you want to use DNS-over-HTTPS, run -`pip install dnspython[doh]`. - -If you want to use DNSSEC functionality, run -`pip install dnspython[dnssec]`. - -If you want to use internationalized domain names (IDNA) -functionality, you must run -`pip install dnspython[idna]` - -If you want to use the Trio asynchronous I/O package, run -`pip install dnspython[trio]`. - -If you want to use WMI on Windows to determine the active DNS settings -instead of the default registry scanning method, run -`pip install dnspython[wmi]`. - -If you want to try the experimental DNS-over-QUIC code, run -`pip install dnspython[doq]`. - -Note that you can install any combination of the above, e.g.: -`pip install dnspython[doh,dnssec,idna]` - -### Notices - -Python 2.x support ended with the release of 1.16.0. Dnspython 2.6.x supports -Python 3.8 and later, though support for 3.8 ends on October 14, 2024. -Dnspython 2.7.x supports Python 3.9 and later. Future support is aligned with the -lifetime of the Python 3 versions. - -Documentation has moved to -[dnspython.readthedocs.io](https://dnspython.readthedocs.io).
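For reference, a minimal sketch of the `dns.zone` API whose vendored copy is deleted above, assuming dnspython is installed from PyPI (`pip install dnspython`) instead of being committed under `venv/`; the zone contents and owner names below are illustrative only:

```
# Minimal sketch, assuming dnspython is installed from PyPI; the zone
# text and owner names here are illustrative, not from this repository.
import dns.rdatatype
import dns.zone

example_zone_text = """
$ORIGIN example.
$TTL 3600
@   SOA ns1 hostmaster 1 7200 900 1209600 86400
@   NS  ns1
ns1 A   10.0.0.1
www A   10.0.0.2
"""

# from_text() parses zone file text and, by default, runs check_origin(),
# which raises NoSOA/NoNS if the apex is missing its SOA or NS RRset.
zone = dns.zone.from_text(example_zone_text, origin="example.")

# iterate_rdatas() yields (name, ttl, rdata) tuples, filtered here to A records.
for name, ttl, rdata in zone.iterate_rdatas(dns.rdatatype.A):
    print(name, ttl, rdata)

# get_rdataset() returns None where find_rdataset() would raise KeyError.
print(zone.get_rdataset("www", dns.rdatatype.A))
print(zone.get_rdataset("mail", dns.rdatatype.A))

# to_text() writes the zone back out, sorted in DNSSEC order by default.
print(zone.to_text())
```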
diff --git a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/RECORD deleted file mode 100644 index f680b5dc..00000000 --- a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/RECORD +++ /dev/null @@ -1,294 +0,0 @@ -dns/__init__.py,sha256=YJZtDG14Idw5ui3h1nWooSwPM9gsxQgB8M0GBZ3aly0,1663 -dns/__pycache__/__init__.cpython-312.pyc,, -dns/__pycache__/_asyncbackend.cpython-312.pyc,, -dns/__pycache__/_asyncio_backend.cpython-312.pyc,, -dns/__pycache__/_ddr.cpython-312.pyc,, -dns/__pycache__/_features.cpython-312.pyc,, -dns/__pycache__/_immutable_ctx.cpython-312.pyc,, -dns/__pycache__/_trio_backend.cpython-312.pyc,, -dns/__pycache__/asyncbackend.cpython-312.pyc,, -dns/__pycache__/asyncquery.cpython-312.pyc,, -dns/__pycache__/asyncresolver.cpython-312.pyc,, -dns/__pycache__/dnssec.cpython-312.pyc,, -dns/__pycache__/dnssectypes.cpython-312.pyc,, -dns/__pycache__/e164.cpython-312.pyc,, -dns/__pycache__/edns.cpython-312.pyc,, -dns/__pycache__/entropy.cpython-312.pyc,, -dns/__pycache__/enum.cpython-312.pyc,, -dns/__pycache__/exception.cpython-312.pyc,, -dns/__pycache__/flags.cpython-312.pyc,, -dns/__pycache__/grange.cpython-312.pyc,, -dns/__pycache__/immutable.cpython-312.pyc,, -dns/__pycache__/inet.cpython-312.pyc,, -dns/__pycache__/ipv4.cpython-312.pyc,, -dns/__pycache__/ipv6.cpython-312.pyc,, -dns/__pycache__/message.cpython-312.pyc,, -dns/__pycache__/name.cpython-312.pyc,, -dns/__pycache__/namedict.cpython-312.pyc,, -dns/__pycache__/nameserver.cpython-312.pyc,, -dns/__pycache__/node.cpython-312.pyc,, -dns/__pycache__/opcode.cpython-312.pyc,, -dns/__pycache__/query.cpython-312.pyc,, -dns/__pycache__/rcode.cpython-312.pyc,, -dns/__pycache__/rdata.cpython-312.pyc,, -dns/__pycache__/rdataclass.cpython-312.pyc,, -dns/__pycache__/rdataset.cpython-312.pyc,, -dns/__pycache__/rdatatype.cpython-312.pyc,, -dns/__pycache__/renderer.cpython-312.pyc,, -dns/__pycache__/resolver.cpython-312.pyc,, -dns/__pycache__/reversename.cpython-312.pyc,, -dns/__pycache__/rrset.cpython-312.pyc,, -dns/__pycache__/serial.cpython-312.pyc,, -dns/__pycache__/set.cpython-312.pyc,, -dns/__pycache__/tokenizer.cpython-312.pyc,, -dns/__pycache__/transaction.cpython-312.pyc,, -dns/__pycache__/tsig.cpython-312.pyc,, -dns/__pycache__/tsigkeyring.cpython-312.pyc,, -dns/__pycache__/ttl.cpython-312.pyc,, -dns/__pycache__/update.cpython-312.pyc,, -dns/__pycache__/version.cpython-312.pyc,, -dns/__pycache__/versioned.cpython-312.pyc,, -dns/__pycache__/win32util.cpython-312.pyc,, -dns/__pycache__/wire.cpython-312.pyc,, -dns/__pycache__/xfr.cpython-312.pyc,, -dns/__pycache__/zone.cpython-312.pyc,, -dns/__pycache__/zonefile.cpython-312.pyc,, -dns/__pycache__/zonetypes.cpython-312.pyc,, -dns/_asyncbackend.py,sha256=pamIAWJ73e7ic2u7Q3RJyG6_6L8t78ccttvi65682MM,2396 -dns/_asyncio_backend.py,sha256=iLqhcUXqnFWC_2tcAp9U00NOGxT5GKPn4qeXS4iKaro,9051 -dns/_ddr.py,sha256=rHXKC8kncCTT9N4KBh1flicl79nyDjQ-DDvq30MJ3B8,5247 -dns/_features.py,sha256=Ig_leAKUT9RDiOVOfA0nXmmqpiPfnOnP9TcxlISUGSk,2492 -dns/_immutable_ctx.py,sha256=gtoCLMmdHXI23zt5lRSIS3A4Ca3jZJngebdoFFOtiwU,2459 -dns/_trio_backend.py,sha256=IXNdUP1MUBPyZRgAFhGH71KHtUCh3Rm5dM8SX4bMj2U,8473 -dns/asyncbackend.py,sha256=82fXTFls_m7F_ekQbgUGOkoBbs4BI-GBLDZAWNGUvJ0,2796 -dns/asyncquery.py,sha256=PMZ_D4Z8vgSioWHftyxNw7eax1IqrPleqY5FIi40hd8,30821 -dns/asyncresolver.py,sha256=GD86dCyW9YGKs6SggWXwBKEXifW7Qdx4cEAGFKY6fA4,17852 -dns/dnssec.py,sha256=gXmIrbKK1t1hE8ht-WlhUc0giy1PpLYj07r6o0pVATY,41717 
-dns/dnssecalgs/__init__.py,sha256=OWvTadxZ3oF5PxVGodNimxBt_-3YUNTOSV62HrIb4PQ,4331 -dns/dnssecalgs/__pycache__/__init__.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/base.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/cryptography.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/dsa.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/ecdsa.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/eddsa.cpython-312.pyc,, -dns/dnssecalgs/__pycache__/rsa.cpython-312.pyc,, -dns/dnssecalgs/base.py,sha256=jlTV_nd1Nqkvqyf-FVHIccXKFrE2LL6GVu6AW8QUh2E,2513 -dns/dnssecalgs/cryptography.py,sha256=3uqMfRm-zCkJPOrxUqlu9CmdxIMy71dVor9eAHi0wZM,2425 -dns/dnssecalgs/dsa.py,sha256=DNO68g_lbG7_oKcDN8c2xuzYRPbLaZc9Ns7oQoa0Vbc,3564 -dns/dnssecalgs/ecdsa.py,sha256=RfvFKRNExsYgd5SoXXRxMHkoBeF2Gktkz2rOwObEYAY,3172 -dns/dnssecalgs/eddsa.py,sha256=7VGARpVUzIYRjPh0gFapTPFzmsK8WJDqDZDLw2KLc8w,1981 -dns/dnssecalgs/rsa.py,sha256=_tNABpr6iwd8STBEHYIXfyLrgBpRNCj8K0UQj32_kOU,3622 -dns/dnssectypes.py,sha256=CyeuGTS_rM3zXr8wD9qMT9jkzvVfTY2JWckUcogG83E,1799 -dns/e164.py,sha256=EsK8cnOtOx7kQ0DmSwibcwkzp6efMWjbRiTyHZO8Q-M,3978 -dns/edns.py,sha256=-XDhC2jr7BRLsJrpCAWShxLn-3eG1oI0HhduWhLxdMw,17089 -dns/entropy.py,sha256=qkG8hXDLzrJS6R5My26iA59c0RhPwJNzuOhOCAZU5Bw,4242 -dns/enum.py,sha256=EepaunPKixTSrascy7iAe9UQEXXxP_MB5Gx4jUpHIhg,3691 -dns/exception.py,sha256=8vjxLf4T3T77vfANe_iKVeButAEhSJve6UrPjiBzht4,5953 -dns/flags.py,sha256=cQ3kTFyvcKiWHAxI5AwchNqxVOrsIrgJ6brgrH42Wq8,2750 -dns/grange.py,sha256=D016OrOv3i44G3mb_CzPFjDk61uZ6BMRib3yJnDQvbw,2144 -dns/immutable.py,sha256=InrtpKvPxl-74oYbzsyneZwAuX78hUqeG22f2aniZbk,2017 -dns/inet.py,sha256=j6jQs3K_ehVhDv-i4jwCKePr5HpEiSzvOXQ4uhgn1sU,5772 -dns/ipv4.py,sha256=qEUXtlqWDH_blicj6VMvyQhfX7-BF0gB_lWJliV-2FI,2552 -dns/ipv6.py,sha256=Ww8ayshM6FxtQsRYdXXuKkPFTad5ZcGbBd9lr1nFct4,6554 -dns/message.py,sha256=QOtdFBEAORhTKN0uQg86uSNvthdxJx40HhMQXYCBHng,68185 -dns/name.py,sha256=Bf3170QHhLFLDnMsWeJyik4i9ucBDbIY6Bydcz8H-2o,42778 -dns/namedict.py,sha256=hJRYpKeQv6Bd2LaUOPV0L_a0eXEIuqgggPXaH4c3Tow,4000 -dns/nameserver.py,sha256=hH4LLOkB4jeyO3VDUWK0lNpMJNNt_cFYf23-HdhpSmE,10115 -dns/node.py,sha256=NGZa0AUMq-CNledJ6wn1Rx6TFYc703cH2OraLysoNWM,12663 -dns/opcode.py,sha256=I6JyuFUL0msja_BYm6bzXHfbbfqUod_69Ss4xcv8xWQ,2730 -dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -dns/query.py,sha256=_Ev7EivZNEpgrUiPIn4BVnDRFCizcayHHcBXt0Ju3As,56298 -dns/quic/__init__.py,sha256=S5_2UuYzSU_LLtrLAf8DHh3KqNF2YHeKJ_-Wv991WlI,2272 -dns/quic/__pycache__/__init__.cpython-312.pyc,, -dns/quic/__pycache__/_asyncio.cpython-312.pyc,, -dns/quic/__pycache__/_common.cpython-312.pyc,, -dns/quic/__pycache__/_sync.cpython-312.pyc,, -dns/quic/__pycache__/_trio.cpython-312.pyc,, -dns/quic/_asyncio.py,sha256=dnABPz5f-JOJsA7D_BdPfuyzpkL_87AaY4CUcmgNj-g,9870 -dns/quic/_common.py,sha256=koWf6rq9_gUorIOV60QZKAHwZF5MuSgQvBznzA5rGzk,10857 -dns/quic/_sync.py,sha256=QF-dW19NwiDW_BDoJZkSQmHz2uEpfgedsUKTPc0JAio,10436 -dns/quic/_trio.py,sha256=01HH4_hU1VRx-BWXl8bQo4-LZem_eKRBNy6RolTZZXY,9248 -dns/rcode.py,sha256=N6JjrIQjCdJy0boKIp8Hcky5tm__LSDscpDz3rE_sgU,4156 -dns/rdata.py,sha256=uk82eldqpWR8L2zp_CB8JG6wWWfK7zdYowWISfMC2XE,31022 -dns/rdataclass.py,sha256=TK4W4ywB1L_X7EZqk2Gmwnu7vdQpolQF5DtQWyNk5xo,2984 -dns/rdataset.py,sha256=BMNvGAzE4HfYHA-pnhsKwELfpr-saz73BzYwMucoKj0,16664 -dns/rdatatype.py,sha256=wgKWnu4mAbXnmG8wKHpV8dZHkhMqNeSsWWlWFo5HcDY,7448 -dns/rdtypes/ANY/AFSDB.py,sha256=k75wMwreF1DAfDymu4lHh16BUx7ulVP3PLeQBZnkurY,1661 -dns/rdtypes/ANY/AMTRELAY.py,sha256=19jfS61mT1CQT-8vf67ZylhDS9JVRVp4WCbFE-7l0jM,3381 
-dns/rdtypes/ANY/AVC.py,sha256=SpsXYzlBirRWN0mGnQe0MdN6H8fvlgXPJX5PjOHnEak,1024 -dns/rdtypes/ANY/CAA.py,sha256=AHh59Is-4WiVWd26yovnPM3hXqKS-yx7IWfXSS0NZhE,2511 -dns/rdtypes/ANY/CDNSKEY.py,sha256=bJAdrBMsFHIJz8TF1AxZoNbdxVWBCRTG-bR_uR_r_G4,1225 -dns/rdtypes/ANY/CDS.py,sha256=Y9nIRUCAabztVLbxm2SXAdYapFemCOUuGh5JqroCDUs,1163 -dns/rdtypes/ANY/CERT.py,sha256=2Cu2LQM6-K4darqhHv1EM_blmpYpnrBIIX1GnL_rxKE,3533 -dns/rdtypes/ANY/CNAME.py,sha256=IHGGq2BDpeKUahTr1pvyBQgm0NGBI_vQ3Vs5mKTXO4w,1206 -dns/rdtypes/ANY/CSYNC.py,sha256=KkZ_rG6PfeL14il97nmJGWWmUGGS5o9nd2EqbJqOuYo,2439 -dns/rdtypes/ANY/DLV.py,sha256=J-pOrw5xXsDoaB9G0r6znlYXJtqtcqhsl1OXs6CPRU4,986 -dns/rdtypes/ANY/DNAME.py,sha256=yqXRtx4dAWwB4YCCv-qW6uaxeGhg2LPQ2uyKwWaMdXs,1150 -dns/rdtypes/ANY/DNSKEY.py,sha256=MD8HUVH5XXeAGOnFWg5aVz_w-2tXYwCeVXmzExhiIeQ,1223 -dns/rdtypes/ANY/DS.py,sha256=_gf8vk1O_uY8QXFjsfUw-bny-fm6e-QpCk3PT0JCyoM,995 -dns/rdtypes/ANY/EUI48.py,sha256=x0BkK0sY_tgzuCwfDYpw6tyuChHjjtbRpAgYhO0Y44o,1151 -dns/rdtypes/ANY/EUI64.py,sha256=1jCff2-SXHJLDnNDnMW8Cd_o-ok0P3x6zKy_bcCU5h4,1161 -dns/rdtypes/ANY/GPOS.py,sha256=u4qwiDBVoC7bsKfxDKGbPjnOKddpdjy2p1AhziDWcPw,4439 -dns/rdtypes/ANY/HINFO.py,sha256=D2WvjTsvD_XqT8BepBIyjPL2iYGMgYqb1VQa9ApO0qE,2217 -dns/rdtypes/ANY/HIP.py,sha256=c32Ewlk88schJ1nPOmT5BVR60ttIM-uH8I8LaRAkFOA,3226 -dns/rdtypes/ANY/ISDN.py,sha256=L4C2Rxrr4JJN17lmJRbZN8RhM_ujjwIskY_4V4Gd3r4,2723 -dns/rdtypes/ANY/L32.py,sha256=TMz2kdGCd0siiQZyiocVDCSnvkOdjhUuYRFyf8o622M,1286 -dns/rdtypes/ANY/L64.py,sha256=sb2BjuPA0PQt67nEyT9rBt759C9e6lH71d3EJHGGnww,1592 -dns/rdtypes/ANY/LOC.py,sha256=NZKIUJULZ3BcK1-gnb2Mk76Pc4UUZry47C5n9VBvhnk,11995 -dns/rdtypes/ANY/LP.py,sha256=wTsKIjtK6vh66qZRLSsiE0k54GO8ieVBGZH8dzVvFnE,1338 -dns/rdtypes/ANY/MX.py,sha256=qQk83idY0-SbRMDmB15JOpJi7cSyiheF-ALUD0Ev19E,995 -dns/rdtypes/ANY/NID.py,sha256=N7Xx4kXf3yVAocTlCXQeJ3BtiQNPFPQVdL1iMuyl5W4,1544 -dns/rdtypes/ANY/NINFO.py,sha256=bdL_-6Bejb2EH-xwR1rfSr_9E3SDXLTAnov7x2924FI,1041 -dns/rdtypes/ANY/NS.py,sha256=ThfaPalUlhbyZyNyvBM3k-7onl3eJKq5wCORrOGtkMM,995 -dns/rdtypes/ANY/NSEC.py,sha256=kicEYxcKaLBpV6C_M8cHdDaqBoiYl6EYtPvjyR6kExI,2465 -dns/rdtypes/ANY/NSEC3.py,sha256=696h-Zz30bmcT0n1rqoEtS5wqE6jIgsVGzaw5TfdGJo,4331 -dns/rdtypes/ANY/NSEC3PARAM.py,sha256=08p6NWS4DiLav1wOuPbxUxB9MtY2IPjfOMCtJwzzMuA,2635 -dns/rdtypes/ANY/OPENPGPKEY.py,sha256=Va0FGo_8vm1OeX62N5iDTWukAdLwrjTXIZeQ6oanE78,1851 -dns/rdtypes/ANY/OPT.py,sha256=W36RslT_Psp95OPUC70knumOYjKpaRHvGT27I-NV2qc,2561 -dns/rdtypes/ANY/PTR.py,sha256=5HcR1D77Otyk91vVY4tmqrfZfSxSXWyWvwIW-rIH5gc,997 -dns/rdtypes/ANY/RESINFO.py,sha256=Kf2NcKbkeI5gFE1bJfQNqQCaitYyXfV_9nQYl1luUZ0,1008 -dns/rdtypes/ANY/RP.py,sha256=8doJlhjYDYiAT6KNF1mAaemJ20YJFUPvit8LOx4-I-U,2174 -dns/rdtypes/ANY/RRSIG.py,sha256=O8vwzS7ldfaj_x8DypvEGFsDSb7al-D7OEnprA3QQoo,4922 -dns/rdtypes/ANY/RT.py,sha256=2t9q3FZQ28iEyceeU25KU2Ur0T5JxELAu8BTwfOUgVw,1013 -dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 -dns/rdtypes/ANY/SOA.py,sha256=Cn8yrag1YvrvwivQgWg-KXmOCaVQVdFHSkFF77w-CE0,3145 -dns/rdtypes/ANY/SPF.py,sha256=rA3Srs9ECQx-37lqm7Zf7aYmMpp_asv4tGS8_fSQ-CU,1022 -dns/rdtypes/ANY/SSHFP.py,sha256=l6TZH2R0kytiZGWez_g-Lq94o5a2xMuwLKwUwsPMx5w,2530 -dns/rdtypes/ANY/TKEY.py,sha256=1ecTuBse2b4QPH2qmx3vn-gfPK0INcKXfxrIyAJxFHA,4927 -dns/rdtypes/ANY/TLSA.py,sha256=cytzebS3W7FFr9qeJ9gFSHq_bOwUk9aRVlXWHfnVrRs,218 -dns/rdtypes/ANY/TSIG.py,sha256=4fNQJSNWZXUKZejCciwQuUJtTw2g-YbPmqHrEj_pitg,4750 -dns/rdtypes/ANY/TXT.py,sha256=F1U9gIAhwXIV4UVT7CwOCEn_su6G1nJIdgWJsLktk20,1000 
-dns/rdtypes/ANY/URI.py,sha256=dpcS8KwcJ2WJ7BkOp4CZYaUyRuw7U2S9GzvVwKUihQg,2921 -dns/rdtypes/ANY/WALLET.py,sha256=IaP2g7Nq26jWGKa8MVxvJjWXLQ0wrNR1IWJVyyMG8oU,219 -dns/rdtypes/ANY/X25.py,sha256=BzEM7uOY7CMAm7QN-dSLj-_LvgnnohwJDUjMstzwqYo,1942 -dns/rdtypes/ANY/ZONEMD.py,sha256=JQicv69EvUxh4FCT7eZSLzzU5L5brw_dSM65Um2t5lQ,2393 -dns/rdtypes/ANY/__init__.py,sha256=My5jT8T5bA66zBydmRSxkmDCFxwI81B4DBRA_S36IL8,1526 -dns/rdtypes/ANY/__pycache__/AFSDB.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/AVC.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CAA.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CDS.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CERT.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CNAME.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/CSYNC.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/DLV.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/DNAME.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/DS.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/EUI48.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/EUI64.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/GPOS.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/HINFO.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/HIP.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/ISDN.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/L32.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/L64.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/LOC.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/LP.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/MX.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NID.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NINFO.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NS.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/OPT.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/PTR.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/RESINFO.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/RP.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/RRSIG.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/RT.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/SOA.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/SPF.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/SSHFP.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/TKEY.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/TLSA.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/TSIG.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/TXT.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/URI.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/WALLET.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/X25.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-312.pyc,, -dns/rdtypes/ANY/__pycache__/__init__.cpython-312.pyc,, -dns/rdtypes/CH/A.py,sha256=-4G3ASZGj7oUlPfDxADibAB1WfTsZBavUO8ghDWarJ8,2212 -dns/rdtypes/CH/__init__.py,sha256=GD9YeDKb9VBDo-J5rrChX1MWEGyQXuR9Htnbhg_iYLc,923 -dns/rdtypes/CH/__pycache__/A.cpython-312.pyc,, -dns/rdtypes/CH/__pycache__/__init__.cpython-312.pyc,, -dns/rdtypes/IN/A.py,sha256=FfFn3SqbpneL9Ky63COP50V2ZFxqS1ldCKJh39Enwug,1814 -dns/rdtypes/IN/AAAA.py,sha256=AxrOlYy-1TTTWeQypDKeXrDCrdHGor0EKCE4fxzSQGo,1820 -dns/rdtypes/IN/APL.py,sha256=ppyFwn0KYMdyDzphxd0BUhgTmZv0QnDMRLjzQQM793U,5097 
-dns/rdtypes/IN/DHCID.py,sha256=zRUh_EOxUPVpJjWY5m7taX8q4Oz5K70785ZtKv5OTCU,1856 -dns/rdtypes/IN/HTTPS.py,sha256=P-IjwcvDQMmtoBgsDHglXF7KgLX73G6jEDqCKsnaGpQ,220 -dns/rdtypes/IN/IPSECKEY.py,sha256=RyIy9K0Yt0uJRjdr6cj5S95ELHHbl--0xV-Qq9O3QQk,3290 -dns/rdtypes/IN/KX.py,sha256=K1JwItL0n5G-YGFCjWeh0C9DyDD8G8VzicsBeQiNAv0,1013 -dns/rdtypes/IN/NAPTR.py,sha256=SaOK-0hIYImwLtb5Hqewi-e49ykJaQiLNvk8ZzNoG7Q,3750 -dns/rdtypes/IN/NSAP.py,sha256=6YfWCVSIPTTBmRAzG8nVBj3LnohncXUhSFJHgp-TRdc,2163 -dns/rdtypes/IN/NSAP_PTR.py,sha256=iTxlV6fr_Y9lqivLLncSHxEhmFqz5UEElDW3HMBtuCU,1015 -dns/rdtypes/IN/PX.py,sha256=vHDNN2rfLObuUKwpYDIvpPB482BqXlHA-ZQpQn9Sb_E,2756 -dns/rdtypes/IN/SRV.py,sha256=a0zGaUwzvih_a4Q9BViUTFs7NZaCqgl7mls3-KRVHm8,2769 -dns/rdtypes/IN/SVCB.py,sha256=HeFmi2v01F00Hott8FlvQ4R7aPxFmT7RF-gt45R5K_M,218 -dns/rdtypes/IN/WKS.py,sha256=kErSG5AO2qIuot_hkMHnQuZB1_uUzUirNdqBoCp97rk,3652 -dns/rdtypes/IN/__init__.py,sha256=HbI8aw9HWroI6SgEvl8Sx6FdkDswCCXMbSRuJy5o8LQ,1083 -dns/rdtypes/IN/__pycache__/A.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/AAAA.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/APL.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/DHCID.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/HTTPS.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/KX.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/NAPTR.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/NSAP.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/PX.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/SRV.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/SVCB.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/WKS.cpython-312.pyc,, -dns/rdtypes/IN/__pycache__/__init__.cpython-312.pyc,, -dns/rdtypes/__init__.py,sha256=NYizfGglJfhqt_GMtSSXf7YQXIEHHCiJ_Y_qaLVeiOI,1073 -dns/rdtypes/__pycache__/__init__.cpython-312.pyc,, -dns/rdtypes/__pycache__/dnskeybase.cpython-312.pyc,, -dns/rdtypes/__pycache__/dsbase.cpython-312.pyc,, -dns/rdtypes/__pycache__/euibase.cpython-312.pyc,, -dns/rdtypes/__pycache__/mxbase.cpython-312.pyc,, -dns/rdtypes/__pycache__/nsbase.cpython-312.pyc,, -dns/rdtypes/__pycache__/svcbbase.cpython-312.pyc,, -dns/rdtypes/__pycache__/tlsabase.cpython-312.pyc,, -dns/rdtypes/__pycache__/txtbase.cpython-312.pyc,, -dns/rdtypes/__pycache__/util.cpython-312.pyc,, -dns/rdtypes/dnskeybase.py,sha256=FoDllfa9Pz2j2rf45VyUUYUsIt3kjjrwDy6LxrlPb5s,2856 -dns/rdtypes/dsbase.py,sha256=I85Aps1lBsiItdqGpsNY1O8icosfPtkWjiUn1J1lLUQ,3427 -dns/rdtypes/euibase.py,sha256=1yKWOM4xBwLLIFFfEj7M9JMankO2l8ljxhG-5OOxXDg,2618 -dns/rdtypes/mxbase.py,sha256=DzjbiKoAAgpqbhwMBIFGA081jR5_doqGAq-kLvy2mns,3196 -dns/rdtypes/nsbase.py,sha256=tueXVV6E8lelebOmrmoOPq47eeRvOpsxHVXH4cOFxcs,2323 -dns/rdtypes/svcbbase.py,sha256=YOH3Wz3fp5GQjdTF7hU-1ys9iDkYEC5p4d0F32ivv5g,17612 -dns/rdtypes/tlsabase.py,sha256=pIiWem6sF4IwyyKmyqx5xg55IG0w3K9r502Yx8PdziA,2596 -dns/rdtypes/txtbase.py,sha256=Dt9ptWSWtnq0Qwlni6IT6YUz_DCixQDDUl5d4P_AfqY,3696 -dns/rdtypes/util.py,sha256=c3eLaucwuxXZjXWuNyCGKzwltgub4AjT4uLVytEuxSk,9017 -dns/renderer.py,sha256=5THf1iKql2JPL2sKZt2-b4zqHKfk_vlx0FEfPtMJysY,11254 -dns/resolver.py,sha256=FH_hiMeCdVYonIYmE3QqEWJKgHOOxlTcHS0dwd_loGY,73730 -dns/reversename.py,sha256=zoqXEbMZXm6R13nXbJHgTsf6L2C6uReODj6mqSHrTiE,3828 -dns/rrset.py,sha256=J-oQPEPJuKueLLiz1FN08P-ys9fjHhPWuwpDdrL4UTQ,9170 -dns/serial.py,sha256=-t5rPW-TcJwzBMfIJo7Tl-uDtaYtpqOfCVYx9dMaDCY,3606 -dns/set.py,sha256=hublMKCIhd9zp5Hz_fvQTwF-Ze28jn7mjqei6vTGWfs,9213 
-dns/tokenizer.py,sha256=65vVkEeTuml3l2AT-NePE6Gt6ucDQNvSpeIVgMpP6G0,23583 -dns/transaction.py,sha256=UhwD6CLQI51dguuz__dxJS8V91vKAoqHdQDCBErJWxE,22589 -dns/tsig.py,sha256=I-Y-c3WMBX11bVioy5puFly2BhlpptUz82ikahxuh1c,11413 -dns/tsigkeyring.py,sha256=Z0xZemcU3XjZ9HlxBYv2E2PSuIhaFreqLDlD7HcmZDA,2633 -dns/ttl.py,sha256=Y4inc4bvkfKpogZn5i1n-tpg1CAjDJxH4_HvfeVjVsM,2977 -dns/update.py,sha256=y9d6LOO8xrUaH2UrZhy3ssnx8bJEsxqTArw5V8XqBRs,12243 -dns/version.py,sha256=GTecBDFJx8cKnGiCmxJhSVjk1EkqnuNVt4xailIi3sk,1926 -dns/versioned.py,sha256=3YQj8mzGmZEsjnuVJJjcWopVmDKYLhEj4hEGTLEwzco,11765 -dns/win32util.py,sha256=r9dOvC0Tq288vwPk-ngugsVpwB5YnfW22DaRv6TTPcU,8874 -dns/wire.py,sha256=vy0SolgECbO1UXB4dnhXhDeFKOJT29nQxXvSfKOgA5s,2830 -dns/xfr.py,sha256=aoW0UtvweaE0NV8cmzgMKLYQOa3hwJ3NudRuqjit4SU,13271 -dns/zone.py,sha256=lLAarSxPtpx4Sw29OQ0ifPshD4QauGu8RnPh2dEropA,52086 -dns/zonefile.py,sha256=Y9lm6I7n4eRS35CyclooiQ_jxiOs3pSyH_0uD4FQyag,27926 -dns/zonetypes.py,sha256=HrQNZxZ_gWLWI9dskix71msi9wkYK5pgrBBbPb1T74Y,690 -dnspython-2.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -dnspython-2.7.0.dist-info/METADATA,sha256=1lF6uqZwb6RAQFYVtBkLic6pBCe9t14TQWtkK9U5eyY,5763 -dnspython-2.7.0.dist-info/RECORD,, -dnspython-2.7.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 -dnspython-2.7.0.dist-info/licenses/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 diff --git a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/WHEEL deleted file mode 100644 index cdd68a49..00000000 --- a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.25.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE deleted file mode 100644 index 390a726d..00000000 --- a/venv/lib/python3.12/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,35 +0,0 @@ -ISC License - -Copyright (C) Dnspython Contributors - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all -copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR -PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - - - -Copyright (C) 2001-2017 Nominum, Inc. -Copyright (C) Google Inc. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose with or without fee is hereby granted, -provided that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/dotenv/__init__.py b/venv/lib/python3.12/site-packages/dotenv/__init__.py deleted file mode 100644 index 7f4c631b..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Any, Optional - -from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key, - unset_key) - - -def load_ipython_extension(ipython: Any) -> None: - from .ipython import load_ipython_extension - load_ipython_extension(ipython) - - -def get_cli_string( - path: Optional[str] = None, - action: Optional[str] = None, - key: Optional[str] = None, - value: Optional[str] = None, - quote: Optional[str] = None, -): - """Returns a string suitable for running as a shell script. - - Useful for converting a arguments passed to a fabric task - to be passed to a `local` or `run` command. - """ - command = ['dotenv'] - if quote: - command.append(f'-q {quote}') - if path: - command.append(f'-f {path}') - if action: - command.append(action) - if key: - command.append(key) - if value: - if ' ' in value: - command.append(f'"{value}"') - else: - command.append(value) - - return ' '.join(command).strip() - - -__all__ = ['get_cli_string', - 'load_dotenv', - 'dotenv_values', - 'get_key', - 'set_key', - 'unset_key', - 'find_dotenv', - 'load_ipython_extension'] diff --git a/venv/lib/python3.12/site-packages/dotenv/__main__.py b/venv/lib/python3.12/site-packages/dotenv/__main__.py deleted file mode 100644 index 3977f55a..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Entry point for cli, enables execution with `python -m dotenv`""" - -from .cli import cli - -if __name__ == "__main__": - cli() diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ce0d8cdd..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 04cd21bf..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc deleted file mode 100644 index 78e671ef..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/cli.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc deleted file mode 100644 index fc0c28fc..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/ipython.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc deleted 
file mode 100644 index 08f0d4fe..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/main.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc deleted file mode 100644 index 72c21222..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/parser.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc deleted file mode 100644 index 66f16357..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/variables.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc deleted file mode 100644 index a34c11bf..00000000 Binary files a/venv/lib/python3.12/site-packages/dotenv/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/dotenv/cli.py b/venv/lib/python3.12/site-packages/dotenv/cli.py deleted file mode 100644 index 075a7af1..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/cli.py +++ /dev/null @@ -1,205 +0,0 @@ -import json -import os -import shlex -import sys -from contextlib import contextmanager -from typing import Any, Dict, IO, Iterator, List, Optional - -if sys.platform == 'win32': - from subprocess import Popen - -try: - import click -except ImportError: - sys.stderr.write('It seems python-dotenv is not installed with cli option. \n' - 'Run pip install "python-dotenv[cli]" to fix this.') - sys.exit(1) - -from .main import dotenv_values, set_key, unset_key -from .version import __version__ - - -def enumerate_env() -> Optional[str]: - """ - Return a path for the ${pwd}/.env file. - - If pwd does not exist, return None. - """ - try: - cwd = os.getcwd() - except FileNotFoundError: - return None - path = os.path.join(cwd, '.env') - return path - - -@click.group() -@click.option('-f', '--file', default=enumerate_env(), - type=click.Path(file_okay=True), - help="Location of the .env file, defaults to .env file in current working directory.") -@click.option('-q', '--quote', default='always', - type=click.Choice(['always', 'never', 'auto']), - help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.") -@click.option('-e', '--export', default=False, - type=click.BOOL, - help="Whether to write the dot file as an executable bash script.") -@click.version_option(version=__version__) -@click.pass_context -def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None: - """This script is used to set, get or unset values from a .env file.""" - ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file} - - -@contextmanager -def stream_file(path: os.PathLike) -> Iterator[IO[str]]: - """ - Open a file and yield the corresponding (decoded) stream. - - Exits with error code 2 if the file cannot be opened. - """ - - try: - with open(path) as stream: - yield stream - except OSError as exc: - print(f"Error opening env file: {exc}", file=sys.stderr) - exit(2) - - -@cli.command() -@click.pass_context -@click.option('--format', default='simple', - type=click.Choice(['simple', 'json', 'shell', 'export']), - help="The format in which to display the list. 
Default format is simple, " - "which displays name=value without quotes.") -def list(ctx: click.Context, format: bool) -> None: - """Display all the stored key/value.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - if format == 'json': - click.echo(json.dumps(values, indent=2, sort_keys=True)) - else: - prefix = 'export ' if format == 'export' else '' - for k in sorted(values): - v = values[k] - if v is not None: - if format in ('export', 'shell'): - v = shlex.quote(v) - click.echo(f'{prefix}{k}={v}') - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -@click.argument('value', required=True) -def set(ctx: click.Context, key: Any, value: Any) -> None: - """Store the given key/value.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - export = ctx.obj['EXPORT'] - success, key, value = set_key(file, key, value, quote, export) - if success: - click.echo(f'{key}={value}') - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def get(ctx: click.Context, key: Any) -> None: - """Retrieve the value for the given key.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - stored_value = values.get(key) - if stored_value: - click.echo(stored_value) - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def unset(ctx: click.Context, key: Any) -> None: - """Removes the given key.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - success, key = unset_key(file, key, quote) - if success: - click.echo(f"Successfully removed {key}") - else: - exit(1) - - -@cli.command(context_settings={'ignore_unknown_options': True}) -@click.pass_context -@click.option( - "--override/--no-override", - default=True, - help="Override variables from the environment file with those from the .env file.", -) -@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) -def run(ctx: click.Context, override: bool, commandline: List[str]) -> None: - """Run command with environment variables present.""" - file = ctx.obj['FILE'] - if not os.path.isfile(file): - raise click.BadParameter( - f'Invalid value for \'-f\' "{file}" does not exist.', - ctx=ctx - ) - dotenv_as_dict = { - k: v - for (k, v) in dotenv_values(file).items() - if v is not None and (override or k not in os.environ) - } - - if not commandline: - click.echo('No command given.') - exit(1) - run_command(commandline, dotenv_as_dict) - - -def run_command(command: List[str], env: Dict[str, str]) -> None: - """Replace the current process with the specified command. - - Replaces the current process with the specified command and the variables from `env` - added in the current environment variables. - - Parameters - ---------- - command: List[str] - The command and it's parameters - env: Dict - The additional environment variables - - Returns - ------- - None - This function does not return any value. It replaces the current process with the new one. - - """ - # copy the current environment variables and add the vales from - # `env` - cmd_env = os.environ.copy() - cmd_env.update(env) - - if sys.platform == 'win32': - # execvpe on Windows returns control immediately - # rather than once the command has finished. 
- p = Popen(command, - universal_newlines=True, - bufsize=0, - shell=False, - env=cmd_env) - _, _ = p.communicate() - - exit(p.returncode) - else: - os.execvpe(command[0], args=command, env=cmd_env) diff --git a/venv/lib/python3.12/site-packages/dotenv/ipython.py b/venv/lib/python3.12/site-packages/dotenv/ipython.py deleted file mode 100644 index 7df727cd..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/ipython.py +++ /dev/null @@ -1,39 +0,0 @@ -from IPython.core.magic import Magics, line_magic, magics_class # type: ignore -from IPython.core.magic_arguments import (argument, magic_arguments, # type: ignore - parse_argstring) # type: ignore - -from .main import find_dotenv, load_dotenv - - -@magics_class -class IPythonDotEnv(Magics): - - @magic_arguments() - @argument( - '-o', '--override', action='store_true', - help="Indicate to override existing variables" - ) - @argument( - '-v', '--verbose', action='store_true', - help="Indicate function calls to be verbose" - ) - @argument('dotenv_path', nargs='?', type=str, default='.env', - help='Search in increasingly higher folders for the `dotenv_path`') - @line_magic - def dotenv(self, line): - args = parse_argstring(self.dotenv, line) - # Locate the .env file - dotenv_path = args.dotenv_path - try: - dotenv_path = find_dotenv(dotenv_path, True, True) - except IOError: - print("cannot find .env file") - return - - # Load the .env file - load_dotenv(dotenv_path, verbose=args.verbose, override=args.override) - - -def load_ipython_extension(ipython): - """Register the %dotenv magic.""" - ipython.register_magics(IPythonDotEnv) diff --git a/venv/lib/python3.12/site-packages/dotenv/main.py b/venv/lib/python3.12/site-packages/dotenv/main.py deleted file mode 100644 index 8e6a7cf4..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/main.py +++ /dev/null @@ -1,400 +0,0 @@ -import io -import logging -import os -import pathlib -import shutil -import sys -import tempfile -from collections import OrderedDict -from contextlib import contextmanager -from typing import IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple, Union - -from .parser import Binding, parse_stream -from .variables import parse_variables - -# A type alias for a string path to be used for the paths in this file. -# These paths may flow to `open()` and `shutil.move()`; `shutil.move()` -# only accepts string paths, not byte paths or file descriptors. See -# https://github.com/python/typeshed/pull/6832. 
-StrPath = Union[str, "os.PathLike[str]"] - -logger = logging.getLogger(__name__) - - -def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]: - for mapping in mappings: - if mapping.error: - logger.warning( - "python-dotenv could not parse statement starting at line %s", - mapping.original.line, - ) - yield mapping - - -class DotEnv: - def __init__( - self, - dotenv_path: Optional[StrPath], - stream: Optional[IO[str]] = None, - verbose: bool = False, - encoding: Optional[str] = None, - interpolate: bool = True, - override: bool = True, - ) -> None: - self.dotenv_path: Optional[StrPath] = dotenv_path - self.stream: Optional[IO[str]] = stream - self._dict: Optional[Dict[str, Optional[str]]] = None - self.verbose: bool = verbose - self.encoding: Optional[str] = encoding - self.interpolate: bool = interpolate - self.override: bool = override - - @contextmanager - def _get_stream(self) -> Iterator[IO[str]]: - if self.dotenv_path and os.path.isfile(self.dotenv_path): - with open(self.dotenv_path, encoding=self.encoding) as stream: - yield stream - elif self.stream is not None: - yield self.stream - else: - if self.verbose: - logger.info( - "python-dotenv could not find configuration file %s.", - self.dotenv_path or ".env", - ) - yield io.StringIO("") - - def dict(self) -> Dict[str, Optional[str]]: - """Return dotenv as dict""" - if self._dict: - return self._dict - - raw_values = self.parse() - - if self.interpolate: - self._dict = OrderedDict( - resolve_variables(raw_values, override=self.override) - ) - else: - self._dict = OrderedDict(raw_values) - - return self._dict - - def parse(self) -> Iterator[Tuple[str, Optional[str]]]: - with self._get_stream() as stream: - for mapping in with_warn_for_invalid_lines(parse_stream(stream)): - if mapping.key is not None: - yield mapping.key, mapping.value - - def set_as_environment_variables(self) -> bool: - """ - Load the current dotenv as system environment variable. - """ - if not self.dict(): - return False - - for k, v in self.dict().items(): - if k in os.environ and not self.override: - continue - if v is not None: - os.environ[k] = v - - return True - - def get(self, key: str) -> Optional[str]: - """ """ - data = self.dict() - - if key in data: - return data[key] - - if self.verbose: - logger.warning("Key %s not found in %s.", key, self.dotenv_path) - - return None - - -def get_key( - dotenv_path: StrPath, - key_to_get: str, - encoding: Optional[str] = "utf-8", -) -> Optional[str]: - """ - Get the value of a given key from the given .env. - - Returns `None` if the key isn't found or doesn't have a value. 
- """ - return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) - - -@contextmanager -def rewrite( - path: StrPath, - encoding: Optional[str], -) -> Iterator[Tuple[IO[str], IO[str]]]: - pathlib.Path(path).touch() - - with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: - error = None - try: - with open(path, encoding=encoding) as source: - yield (source, dest) - except BaseException as err: - error = err - - if error is None: - shutil.move(dest.name, path) - else: - os.unlink(dest.name) - raise error from None - - -def set_key( - dotenv_path: StrPath, - key_to_set: str, - value_to_set: str, - quote_mode: str = "always", - export: bool = False, - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str, str]: - """ - Adds or Updates a key/value to the given .env - - If the .env path given doesn't exist, fails instead of risking creating - an orphan .env somewhere in the filesystem - """ - if quote_mode not in ("always", "auto", "never"): - raise ValueError(f"Unknown quote_mode: {quote_mode}") - - quote = quote_mode == "always" or ( - quote_mode == "auto" and not value_to_set.isalnum() - ) - - if quote: - value_out = "'{}'".format(value_to_set.replace("'", "\\'")) - else: - value_out = value_to_set - if export: - line_out = f"export {key_to_set}={value_out}\n" - else: - line_out = f"{key_to_set}={value_out}\n" - - with rewrite(dotenv_path, encoding=encoding) as (source, dest): - replaced = False - missing_newline = False - for mapping in with_warn_for_invalid_lines(parse_stream(source)): - if mapping.key == key_to_set: - dest.write(line_out) - replaced = True - else: - dest.write(mapping.original.string) - missing_newline = not mapping.original.string.endswith("\n") - if not replaced: - if missing_newline: - dest.write("\n") - dest.write(line_out) - - return True, key_to_set, value_to_set - - -def unset_key( - dotenv_path: StrPath, - key_to_unset: str, - quote_mode: str = "always", - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str]: - """ - Removes a given key from the given `.env` file. - - If the .env path given doesn't exist, fails. - If the given key doesn't exist in the .env, fails. 
- """ - if not os.path.exists(dotenv_path): - logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path) - return None, key_to_unset - - removed = False - with rewrite(dotenv_path, encoding=encoding) as (source, dest): - for mapping in with_warn_for_invalid_lines(parse_stream(source)): - if mapping.key == key_to_unset: - removed = True - else: - dest.write(mapping.original.string) - - if not removed: - logger.warning( - "Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path - ) - return None, key_to_unset - - return removed, key_to_unset - - -def resolve_variables( - values: Iterable[Tuple[str, Optional[str]]], - override: bool, -) -> Mapping[str, Optional[str]]: - new_values: Dict[str, Optional[str]] = {} - - for name, value in values: - if value is None: - result = None - else: - atoms = parse_variables(value) - env: Dict[str, Optional[str]] = {} - if override: - env.update(os.environ) # type: ignore - env.update(new_values) - else: - env.update(new_values) - env.update(os.environ) # type: ignore - result = "".join(atom.resolve(env) for atom in atoms) - - new_values[name] = result - - return new_values - - -def _walk_to_root(path: str) -> Iterator[str]: - """ - Yield directories starting from the given directory up to the root - """ - if not os.path.exists(path): - raise IOError("Starting path not found") - - if os.path.isfile(path): - path = os.path.dirname(path) - - last_dir = None - current_dir = os.path.abspath(path) - while last_dir != current_dir: - yield current_dir - parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir)) - last_dir, current_dir = current_dir, parent_dir - - -def find_dotenv( - filename: str = ".env", - raise_error_if_not_found: bool = False, - usecwd: bool = False, -) -> str: - """ - Search in increasingly higher folders for the given file - - Returns path to the file if found, or an empty string otherwise - """ - - def _is_interactive(): - """Decide whether this is running in a REPL or IPython notebook""" - if hasattr(sys, "ps1") or hasattr(sys, "ps2"): - return True - try: - main = __import__("__main__", None, None, fromlist=["__file__"]) - except ModuleNotFoundError: - return False - return not hasattr(main, "__file__") - - def _is_debugger(): - return sys.gettrace() is not None - - if usecwd or _is_interactive() or _is_debugger() or getattr(sys, "frozen", False): - # Should work without __file__, e.g. in REPL or IPython notebook. - path = os.getcwd() - else: - # will work for .py files - frame = sys._getframe() - current_file = __file__ - - while frame.f_code.co_filename == current_file or not os.path.exists( - frame.f_code.co_filename - ): - assert frame.f_back is not None - frame = frame.f_back - frame_filename = frame.f_code.co_filename - path = os.path.dirname(os.path.abspath(frame_filename)) - - for dirname in _walk_to_root(path): - check_path = os.path.join(dirname, filename) - if os.path.isfile(check_path): - return check_path - - if raise_error_if_not_found: - raise IOError("File not found") - - return "" - - -def load_dotenv( - dotenv_path: Optional[StrPath] = None, - stream: Optional[IO[str]] = None, - verbose: bool = False, - override: bool = False, - interpolate: bool = True, - encoding: Optional[str] = "utf-8", -) -> bool: - """Parse a .env file and then load all the variables found as environment variables. - - Parameters: - dotenv_path: Absolute or relative path to .env file. - stream: Text stream (such as `io.StringIO`) with .env content, used if - `dotenv_path` is `None`. 
- verbose: Whether to output a warning the .env file is missing. - override: Whether to override the system environment variables with the variables - from the `.env` file. - encoding: Encoding to be used to read the file. - Returns: - Bool: True if at least one environment variable is set else False - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file with it's default parameters. If you need to change the default parameters - of `find_dotenv()`, you can explicitly call `find_dotenv()` and pass the result - to this function as `dotenv_path`. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - dotenv = DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=override, - encoding=encoding, - ) - return dotenv.set_as_environment_variables() - - -def dotenv_values( - dotenv_path: Optional[StrPath] = None, - stream: Optional[IO[str]] = None, - verbose: bool = False, - interpolate: bool = True, - encoding: Optional[str] = "utf-8", -) -> Dict[str, Optional[str]]: - """ - Parse a .env file and return its content as a dict. - - The returned dict will have `None` values for keys without values in the .env file. - For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in - `{"foo": None}` - - Parameters: - dotenv_path: Absolute or relative path to the .env file. - stream: `StringIO` object with .env content, used if `dotenv_path` is `None`. - verbose: Whether to output a warning if the .env file is missing. - encoding: Encoding to be used to read the file. - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - return DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=True, - encoding=encoding, - ).dict() diff --git a/venv/lib/python3.12/site-packages/dotenv/parser.py b/venv/lib/python3.12/site-packages/dotenv/parser.py deleted file mode 100644 index 735f14a3..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/parser.py +++ /dev/null @@ -1,175 +0,0 @@ -import codecs -import re -from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 - Pattern, Sequence, Tuple) - - -def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: - return re.compile(string, re.UNICODE | extra_flags) - - -_newline = make_regex(r"(\r\n|\n|\r)") -_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) -_whitespace = make_regex(r"[^\S\r\n]*") -_export = make_regex(r"(?:export[^\S\r\n]+)?") -_single_quoted_key = make_regex(r"'([^']+)'") -_unquoted_key = make_regex(r"([^=\#\s]+)") -_equal_sign = make_regex(r"(=[^\S\r\n]*)") -_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") -_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') -_unquoted_value = make_regex(r"([^\r\n]*)") -_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") -_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") -_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") -_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") -_single_quote_escapes = make_regex(r"\\[\\']") - - -class Original(NamedTuple): - string: str - line: int - - -class Binding(NamedTuple): - key: Optional[str] - value: Optional[str] - original: Original - error: bool - - -class Position: - def __init__(self, chars: int, line: int) -> None: - self.chars = chars - self.line = line - - @classmethod - def 
start(cls) -> "Position": - return cls(chars=0, line=1) - - def set(self, other: "Position") -> None: - self.chars = other.chars - self.line = other.line - - def advance(self, string: str) -> None: - self.chars += len(string) - self.line += len(re.findall(_newline, string)) - - -class Error(Exception): - pass - - -class Reader: - def __init__(self, stream: IO[str]) -> None: - self.string = stream.read() - self.position = Position.start() - self.mark = Position.start() - - def has_next(self) -> bool: - return self.position.chars < len(self.string) - - def set_mark(self) -> None: - self.mark.set(self.position) - - def get_marked(self) -> Original: - return Original( - string=self.string[self.mark.chars:self.position.chars], - line=self.mark.line, - ) - - def peek(self, count: int) -> str: - return self.string[self.position.chars:self.position.chars + count] - - def read(self, count: int) -> str: - result = self.string[self.position.chars:self.position.chars + count] - if len(result) < count: - raise Error("read: End of string") - self.position.advance(result) - return result - - def read_regex(self, regex: Pattern[str]) -> Sequence[str]: - match = regex.match(self.string, self.position.chars) - if match is None: - raise Error("read_regex: Pattern not found") - self.position.advance(self.string[match.start():match.end()]) - return match.groups() - - -def decode_escapes(regex: Pattern[str], string: str) -> str: - def decode_match(match: Match[str]) -> str: - return codecs.decode(match.group(0), 'unicode-escape') # type: ignore - - return regex.sub(decode_match, string) - - -def parse_key(reader: Reader) -> Optional[str]: - char = reader.peek(1) - if char == "#": - return None - elif char == "'": - (key,) = reader.read_regex(_single_quoted_key) - else: - (key,) = reader.read_regex(_unquoted_key) - return key - - -def parse_unquoted_value(reader: Reader) -> str: - (part,) = reader.read_regex(_unquoted_value) - return re.sub(r"\s+#.*", "", part).rstrip() - - -def parse_value(reader: Reader) -> str: - char = reader.peek(1) - if char == u"'": - (value,) = reader.read_regex(_single_quoted_value) - return decode_escapes(_single_quote_escapes, value) - elif char == u'"': - (value,) = reader.read_regex(_double_quoted_value) - return decode_escapes(_double_quote_escapes, value) - elif char in (u"", u"\n", u"\r"): - return u"" - else: - return parse_unquoted_value(reader) - - -def parse_binding(reader: Reader) -> Binding: - reader.set_mark() - try: - reader.read_regex(_multiline_whitespace) - if not reader.has_next(): - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=False, - ) - reader.read_regex(_export) - key = parse_key(reader) - reader.read_regex(_whitespace) - if reader.peek(1) == "=": - reader.read_regex(_equal_sign) - value: Optional[str] = parse_value(reader) - else: - value = None - reader.read_regex(_comment) - reader.read_regex(_end_of_line) - return Binding( - key=key, - value=value, - original=reader.get_marked(), - error=False, - ) - except Error: - reader.read_regex(_rest_of_line) - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=True, - ) - - -def parse_stream(stream: IO[str]) -> Iterator[Binding]: - reader = Reader(stream) - while reader.has_next(): - yield parse_binding(reader) diff --git a/venv/lib/python3.12/site-packages/dotenv/py.typed b/venv/lib/python3.12/site-packages/dotenv/py.typed deleted file mode 100644 index 7632ecf7..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/py.typed +++ /dev/null @@ -1 +0,0 
@@ -# Marker file for PEP 561 diff --git a/venv/lib/python3.12/site-packages/dotenv/variables.py b/venv/lib/python3.12/site-packages/dotenv/variables.py deleted file mode 100644 index 667f2f26..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/variables.py +++ /dev/null @@ -1,86 +0,0 @@ -import re -from abc import ABCMeta, abstractmethod -from typing import Iterator, Mapping, Optional, Pattern - -_posix_variable: Pattern[str] = re.compile( - r""" - \$\{ - (?P[^\}:]*) - (?::- - (?P[^\}]*) - )? - \} - """, - re.VERBOSE, -) - - -class Atom(metaclass=ABCMeta): - def __ne__(self, other: object) -> bool: - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - return not result - - @abstractmethod - def resolve(self, env: Mapping[str, Optional[str]]) -> str: ... - - -class Literal(Atom): - def __init__(self, value: str) -> None: - self.value = value - - def __repr__(self) -> str: - return f"Literal(value={self.value})" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, self.__class__): - return NotImplemented - return self.value == other.value - - def __hash__(self) -> int: - return hash((self.__class__, self.value)) - - def resolve(self, env: Mapping[str, Optional[str]]) -> str: - return self.value - - -class Variable(Atom): - def __init__(self, name: str, default: Optional[str]) -> None: - self.name = name - self.default = default - - def __repr__(self) -> str: - return f"Variable(name={self.name}, default={self.default})" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, self.__class__): - return NotImplemented - return (self.name, self.default) == (other.name, other.default) - - def __hash__(self) -> int: - return hash((self.__class__, self.name, self.default)) - - def resolve(self, env: Mapping[str, Optional[str]]) -> str: - default = self.default if self.default is not None else "" - result = env.get(self.name, default) - return result if result is not None else "" - - -def parse_variables(value: str) -> Iterator[Atom]: - cursor = 0 - - for match in _posix_variable.finditer(value): - (start, end) = match.span() - name = match["name"] - default = match["default"] - - if start > cursor: - yield Literal(value=value[cursor:start]) - - yield Variable(name=name, default=default) - cursor = end - - length = len(value) - if cursor < length: - yield Literal(value=value[cursor:length]) diff --git a/venv/lib/python3.12/site-packages/dotenv/version.py b/venv/lib/python3.12/site-packages/dotenv/version.py deleted file mode 100644 index a82b376d..00000000 --- a/venv/lib/python3.12/site-packages/dotenv/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "1.1.1" diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE deleted file mode 100644 index 474479a2..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -"python-ecdsa" Copyright (c) 2010 Brian Warner - -Portions written in 2005 by Peter Pearson and placed in the public domain. 
- -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA deleted file mode 100644 index 3e63190b..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/METADATA +++ /dev/null @@ -1,671 +0,0 @@ -Metadata-Version: 2.1 -Name: ecdsa -Version: 0.19.1 -Summary: ECDSA cryptographic signature library (pure python) -Home-page: http://github.com/tlsfuzzer/python-ecdsa -Author: Brian Warner -Author-email: warner@lothar.com -License: MIT -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.* -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: six >=1.9.0 -Provides-Extra: gmpy -Requires-Dist: gmpy ; extra == 'gmpy' -Provides-Extra: gmpy2 -Requires-Dist: gmpy2 ; extra == 'gmpy2' - -# Pure-Python ECDSA and ECDH - -[![GitHub CI](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/ci.yml) -[![Documentation Status](https://readthedocs.org/projects/ecdsa/badge/?version=latest)](https://ecdsa.readthedocs.io/en/latest/?badge=latest) -[![Coverage Status](https://coveralls.io/repos/github/tlsfuzzer/python-ecdsa/badge.svg?branch=master)](https://coveralls.io/github/tlsfuzzer/python-ecdsa?branch=master) -![condition coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-condition-coverage.json) -![mutation score](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomato42/9b6ca1f3410207fbeca785a178781651/raw/python-ecdsa-mutation-score.json) 
-[![CodeQL](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml/badge.svg)](https://github.com/tlsfuzzer/python-ecdsa/actions/workflows/codeql.yml)
-[![Latest Version](https://img.shields.io/pypi/v/ecdsa.svg?style=flat)](https://pypi.python.org/pypi/ecdsa/)
-![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=flat)
-
-
-This is an easy-to-use implementation of ECC (Elliptic Curve Cryptography)
-with support for ECDSA (Elliptic Curve Digital Signature Algorithm),
-EdDSA (Edwards-curve Digital Signature Algorithm) and ECDH
-(Elliptic Curve Diffie-Hellman), implemented purely in Python and released
-under the MIT license. With this library, you can quickly create key pairs
-(signing key and verifying key), sign messages, and verify the signatures.
-You can also agree on a shared secret key based on exchanged public keys.
-The keys and signatures are very short, making them easy to handle and
-incorporate into other protocols.
-
-**NOTE: This library should not be used in production settings; see [Security](#Security) for more details.**
-
-## Features
-
-This library provides key generation, signing, verifying, and shared secret
-derivation for five
-popular NIST "Suite B" GF(p) (_prime field_) curves, with key lengths of 192,
-224, 256, 384, and 521 bits. The "short names" for these curves, as known by
-the OpenSSL tool (`openssl ecparam -list_curves`), are: `prime192v1`,
-`secp224r1`, `prime256v1`, `secp384r1`, and `secp521r1`. It includes the
-256-bit curve `secp256k1` used by Bitcoin. There is also support for the
-regular (non-twisted) variants of Brainpool curves from 160 to 512 bits. The
-"short names" of those curves are: `brainpoolP160r1`, `brainpoolP192r1`,
-`brainpoolP224r1`, `brainpoolP256r1`, `brainpoolP320r1`, `brainpoolP384r1`,
-`brainpoolP512r1`. A few of the small curves from the SEC standard are also
-included (mainly to speed up testing of the library); those are:
-`secp112r1`, `secp112r2`, `secp128r1`, and `secp160r1`.
-Key generation, signing, and verifying are also supported for the Ed25519 and
-Ed448 curves.
-No other curves are included, but it is not too hard to add support for more
-curves over prime fields.
-
-## Dependencies
-
-This library uses only Python and the 'six' package. It is compatible with
-Python 2.6, 2.7, and 3.6+. It also supports execution on alternative
-implementations like pypy and pypy3.
-
-If `gmpy2` or `gmpy` is installed, it will be used for faster arithmetic.
-Either of them can be installed after this library is installed;
-`python-ecdsa` will detect their presence on start-up and use them
-automatically. You should prefer `gmpy2` on Python 3 for optimal performance.
-
-To run the OpenSSL compatibility tests, the 'openssl' tool must be in your
-`PATH`. This release has been tested successfully against OpenSSL 0.9.8o,
-1.0.0a, 1.0.2f, 1.1.1d and 3.0.1 (among others).
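To make the Features section above concrete, here is a minimal sketch of the keygen/sign/verify flow (editor's addition, not part of the deleted README; the curve is an arbitrary pick from those listed above):

```python
# Minimal sketch of key generation, signing, and verification with ecdsa.
# NIST384p is an arbitrary choice among the supported curves.
from ecdsa import SigningKey, NIST384p

sk = SigningKey.generate(curve=NIST384p)  # signing (private) key
vk = sk.get_verifying_key()               # matching verifying (public) key
signature = sk.sign(b"message")
assert vk.verify(signature, b"message")   # raises BadSignatureError if invalid
```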
- - -## Installation - -This library is available on PyPI, it's recommended to install it using `pip`: - -``` -pip install ecdsa -``` - -In case higher performance is wanted and using native code is not a problem, -it's possible to specify installation together with `gmpy2`: - -``` -pip install ecdsa[gmpy2] -``` - -or (slower, legacy option): -``` -pip install ecdsa[gmpy] -``` - -## Speed - -The following table shows how long this library takes to generate key pairs -(`keygen`), to sign data (`sign`), to verify those signatures (`verify`), -to derive a shared secret (`ecdh`), and -to verify the signatures with no key-specific precomputation (`no PC verify`). -All those values are in seconds. -For convenience, the inverses of those values are also provided: -how many keys per second can be generated (`keygen/s`), how many signatures -can be made per second (`sign/s`), how many signatures can be verified -per second (`verify/s`), how many shared secrets can be derived per second -(`ecdh/s`), and how many signatures with no key specific -precomputation can be verified per second (`no PC verify/s`). The size of raw -signature (generally the smallest -the way a signature can be encoded) is also provided in the `siglen` column. -Use `tox -e speed` to generate this table on your own computer. -On an Intel Core i7 4790K @ 4.0GHz I'm getting the following performance: - -``` - siglen keygen keygen/s sign sign/s verify verify/s no PC verify no PC verify/s - NIST192p: 48 0.00032s 3134.06 0.00033s 2985.53 0.00063s 1598.36 0.00129s 774.43 - NIST224p: 56 0.00040s 2469.24 0.00042s 2367.88 0.00081s 1233.41 0.00170s 586.66 - NIST256p: 64 0.00051s 1952.73 0.00054s 1867.80 0.00098s 1021.86 0.00212s 471.27 - NIST384p: 96 0.00107s 935.92 0.00111s 904.23 0.00203s 491.77 0.00446s 224.00 - NIST521p: 132 0.00210s 475.52 0.00215s 464.16 0.00398s 251.28 0.00874s 114.39 - SECP256k1: 64 0.00052s 1921.54 0.00054s 1847.49 0.00105s 948.68 0.00210s 477.01 - BRAINPOOLP160r1: 40 0.00025s 4003.88 0.00026s 3845.12 0.00053s 1893.93 0.00105s 949.92 - BRAINPOOLP192r1: 48 0.00033s 3043.97 0.00034s 2975.98 0.00063s 1581.50 0.00135s 742.29 - BRAINPOOLP224r1: 56 0.00041s 2436.44 0.00043s 2315.51 0.00078s 1278.49 0.00180s 556.16 - BRAINPOOLP256r1: 64 0.00053s 1892.49 0.00054s 1846.24 0.00114s 875.64 0.00229s 437.25 - BRAINPOOLP320r1: 80 0.00073s 1361.26 0.00076s 1309.25 0.00143s 699.29 0.00322s 310.49 - BRAINPOOLP384r1: 96 0.00107s 931.29 0.00111s 901.80 0.00230s 434.19 0.00476s 210.20 - BRAINPOOLP512r1: 128 0.00207s 483.41 0.00212s 471.42 0.00425s 235.43 0.00912s 109.61 - SECP112r1: 28 0.00015s 6672.53 0.00016s 6440.34 0.00031s 3265.41 0.00056s 1774.20 - SECP112r2: 28 0.00015s 6697.11 0.00015s 6479.98 0.00028s 3524.72 0.00058s 1716.16 - SECP128r1: 32 0.00018s 5497.65 0.00019s 5272.89 0.00036s 2747.39 0.00072s 1396.16 - SECP160r1: 42 0.00025s 3949.32 0.00026s 3894.45 0.00046s 2153.85 0.00102s 985.07 - Ed25519: 64 0.00076s 1324.48 0.00042s 2405.01 0.00109s 918.05 0.00344s 290.50 - Ed448: 114 0.00176s 569.53 0.00115s 870.94 0.00282s 355.04 0.01024s 97.69 - - ecdh ecdh/s - NIST192p: 0.00104s 964.89 - NIST224p: 0.00134s 748.63 - NIST256p: 0.00170s 587.08 - NIST384p: 0.00352s 283.90 - NIST521p: 0.00717s 139.51 - SECP256k1: 0.00154s 648.40 - BRAINPOOLP160r1: 0.00082s 1220.70 - BRAINPOOLP192r1: 0.00105s 956.75 - BRAINPOOLP224r1: 0.00136s 734.52 - BRAINPOOLP256r1: 0.00178s 563.32 - BRAINPOOLP320r1: 0.00252s 397.23 - BRAINPOOLP384r1: 0.00376s 266.27 - BRAINPOOLP512r1: 0.00733s 136.35 - SECP112r1: 0.00046s 2180.40 - SECP112r2: 0.00045s 2229.14 
-       SECP128r1:  0.00054s  1868.15
-       SECP160r1:  0.00080s  1243.98
-```
-
-To test performance with `gmpy2` loaded, use `tox -e speedgmpy2`.
-On the same machine I'm getting the following performance with `gmpy2`:
-```
-                  siglen  keygen   keygen/s    sign     sign/s    verify  verify/s  no PC verify  no PC verify/s
-        NIST192p:    48  0.00017s  5933.40  0.00017s  5751.70  0.00032s  3125.28      0.00067s       1502.41
-        NIST224p:    56  0.00021s  4782.87  0.00022s  4610.05  0.00040s  2487.04      0.00089s       1126.90
-        NIST256p:    64  0.00023s  4263.98  0.00024s  4125.16  0.00045s  2200.88      0.00098s       1016.82
-        NIST384p:    96  0.00041s  2449.54  0.00042s  2399.96  0.00083s  1210.57      0.00172s        581.43
-        NIST521p:   132  0.00071s  1416.07  0.00072s  1389.81  0.00144s   692.93      0.00312s        320.40
-       SECP256k1:    64  0.00024s  4245.05  0.00024s  4122.09  0.00045s  2206.40      0.00094s       1068.32
- BRAINPOOLP160r1:    40  0.00014s  6939.17  0.00015s  6681.55  0.00029s  3452.43      0.00057s       1769.81
- BRAINPOOLP192r1:    48  0.00017s  5920.05  0.00017s  5774.36  0.00034s  2979.00      0.00069s       1453.19
- BRAINPOOLP224r1:    56  0.00021s  4732.12  0.00022s  4622.65  0.00041s  2422.47      0.00087s       1149.87
- BRAINPOOLP256r1:    64  0.00024s  4233.02  0.00024s  4115.20  0.00047s  2143.27      0.00098s       1015.60
- BRAINPOOLP320r1:    80  0.00032s  3162.38  0.00032s  3077.62  0.00063s  1598.83      0.00136s        737.34
- BRAINPOOLP384r1:    96  0.00041s  2436.88  0.00042s  2395.62  0.00083s  1202.68      0.00178s        562.85
- BRAINPOOLP512r1:   128  0.00063s  1587.60  0.00064s  1558.83  0.00125s   799.96      0.00281s        355.83
-       SECP112r1:    28  0.00009s 11118.66  0.00009s 10775.48  0.00018s  5456.00      0.00033s       3020.83
-       SECP112r2:    28  0.00009s 11322.97  0.00009s 10857.71  0.00017s  5748.77      0.00032s       3094.28
-       SECP128r1:    32  0.00010s 10078.39  0.00010s  9665.27  0.00019s  5200.58      0.00036s       2760.88
-       SECP160r1:    42  0.00015s  6875.51  0.00015s  6647.35  0.00029s  3422.41      0.00057s       1768.35
-         Ed25519:    64  0.00030s  3322.56  0.00018s  5568.63  0.00046s  2165.35      0.00153s        654.02
-           Ed448:   114  0.00060s  1680.53  0.00039s  2567.40  0.00096s  1036.67      0.00350s        285.62
-
-                     ecdh     ecdh/s
-        NIST192p:  0.00050s  1985.70
-        NIST224p:  0.00066s  1524.16
-        NIST256p:  0.00071s  1413.07
-        NIST384p:  0.00127s   788.89
-        NIST521p:  0.00230s   434.85
-       SECP256k1:  0.00071s  1409.95
- BRAINPOOLP160r1:  0.00042s  2374.65
- BRAINPOOLP192r1:  0.00051s  1960.01
- BRAINPOOLP224r1:  0.00066s  1518.37
- BRAINPOOLP256r1:  0.00071s  1399.90
- BRAINPOOLP320r1:  0.00100s   997.21
- BRAINPOOLP384r1:  0.00129s   777.51
- BRAINPOOLP512r1:  0.00210s   475.99
-       SECP112r1:  0.00022s  4457.70
-       SECP112r2:  0.00024s  4252.33
-       SECP128r1:  0.00028s  3589.31
-       SECP160r1:  0.00043s  2305.02
-```
-
-(there's also a `gmpy` version; run it with `tox -e speedgmpy`)
-
-For comparison, a highly optimised implementation (including curve-specific
-assembly for some curves), like the one in OpenSSL 1.1.1d, provides the
-following performance numbers on the same machine.
-Run `openssl speed ecdsa` and `openssl speed ecdh` to reproduce it:
-```
-                                    sign     verify    sign/s  verify/s
- 192 bits ecdsa (nistp192)         0.0002s   0.0002s   4785.6   5380.7
- 224 bits ecdsa (nistp224)         0.0000s   0.0001s  22475.6   9822.0
- 256 bits ecdsa (nistp256)         0.0000s   0.0001s  45069.6  14166.6
- 384 bits ecdsa (nistp384)         0.0008s   0.0006s   1265.6   1648.1
- 521 bits ecdsa (nistp521)         0.0003s   0.0005s   3753.1   1819.5
- 256 bits ecdsa (brainpoolP256r1)  0.0003s   0.0003s   2983.5   3333.2
- 384 bits ecdsa (brainpoolP384r1)  0.0008s   0.0007s   1258.8   1528.1
- 512 bits ecdsa (brainpoolP512r1)  0.0015s   0.0012s    675.1    860.1
-
-                                    sign     verify    sign/s  verify/s
- 253 bits EdDSA (Ed25519)          0.0000s   0.0001s  28217.9  10897.7
- 456 bits EdDSA (Ed448)            0.0003s   0.0005s   3926.5   2147.7
-
-                                     op      op/s
- 192 bits ecdh (nistp192)          0.0002s   4853.4
- 224 bits ecdh (nistp224)          0.0001s  15252.1
- 256 bits ecdh (nistp256)          0.0001s  18436.3
- 384 bits ecdh (nistp384)          0.0008s   1292.7
- 521 bits ecdh (nistp521)          0.0003s   2884.7
- 256 bits ecdh (brainpoolP256r1)   0.0003s   3066.5
- 384 bits ecdh (brainpoolP384r1)   0.0008s   1298.0
- 512 bits ecdh (brainpoolP512r1)   0.0014s    694.8
-```
-
-Keys and signatures can be serialized in different ways (see Usage, below).
-For a NIST192p key, the three basic representations require strings of the
-following lengths (in bytes):
-
-    to_string:  signkey= 24, verifykey= 48, signature=48
-    compressed: signkey=n/a, verifykey= 25, signature=n/a
-    DER:        signkey=106, verifykey= 80, signature=55
-    PEM:        signkey=278, verifykey=162, (no support for PEM signatures)
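-
-These lengths are easy to check for yourself; a minimal sketch using the
-default NIST192p curve (the expected values in the comments come from the
-table above):
-
-```python
-from ecdsa import SigningKey
-
-sk = SigningKey.generate()  # NIST192p is the default curve
-vk = sk.verifying_key
-signature = sk.sign(b"message")
-
-print(len(sk.to_string()))              # 24
-print(len(vk.to_string()))              # 48
-print(len(vk.to_string("compressed")))  # 25
-print(len(signature))                   # 48
-print(len(sk.to_der()), len(vk.to_der()))  # 106 80
-```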
-
-## History
-
-In 2006, Peter Pearson announced his pure-python implementation of ECDSA in a
-[message to sci.crypt][1], available from his [download site][2]. In 2010,
-Brian Warner wrote a wrapper around this code, to make it a bit easier and
-safer to use. In 2020, Hubert Kario included an implementation of elliptic
-curve cryptography that uses Jacobian coordinates internally, improving
-performance about 20-fold. You are looking at the README for this wrapper.
-
-[1]: http://www.derkeiler.com/Newsgroups/sci.crypt/2006-01/msg00651.html
-[2]: http://webpages.charter.net/curryfans/peter/downloads.html
-
-## Testing
-
-To run the full test suite, do this:
-
-    tox -e coverage
-
-On an Intel Core i7 4790K @ 4.0GHz, the tests take about 18 seconds to execute.
-The test suite uses
-[`hypothesis`](https://github.com/HypothesisWorks/hypothesis), so there is
-some inherent variability in the test suite execution time.
-
-Parts of `test_pyecdsa.py` and `test_ecdh.py` check compatibility with
-OpenSSL by running the `openssl` CLI tool; make sure it's in your `PATH` if
-you want to test compatibility with it. If OpenSSL is missing, too old, or
-doesn't support all the curves supported in upstream releases, you will see
-skipped tests in the above `coverage` run.
-
-## Security
-
-This library was not designed with security in mind. If you are processing
-data that needs to be protected, we suggest you use a quality wrapper around
-OpenSSL. [pyca/cryptography](https://cryptography.io) is one example of such
-a wrapper. The primary use-case of this library is as a portable library for
-interoperability testing and as a teaching tool.
-
-**This library does not protect against side-channel attacks.**
-
-Do not allow attackers to measure how long it takes you to generate a key
-pair or sign a message. Do not allow attackers to run code on the same
-physical machine when key pair generation or signing is taking place (this
-includes virtual machines). Do not allow attackers to measure how much power
-your computer uses while generating the key pair or signing a message. Do
-not allow attackers to measure RF interference coming from your computer
-while generating a key pair or signing a message. Note that just loading a
-private key causes the same computations as key pair generation. Other
-operations may also be vulnerable, and other attack vectors may exist.
-**For a sophisticated attacker, observing just one operation with a private
-key will be sufficient to completely reconstruct the private key.**
-
-Please also note that any pure-Python cryptographic library will be
-vulnerable to the same side-channel attacks. This is because Python does not
-provide side-channel-secure primitives (with the exception of
-[`hmac.compare_digest()`][3]), making side-channel-secure programming
-impossible.
-
-This library depends upon a strong source of random numbers. Do not use it on
-a system where `os.urandom()` does not provide cryptographically secure
-random numbers.
-
-[3]: https://docs.python.org/3/library/hmac.html#hmac.compare_digest
-
-## Usage
-
-You start by creating a `SigningKey`. You can use it to sign data by passing
-in the data as a byte string and getting back the signature (also a byte
-string). You can also ask a `SigningKey` to give you the corresponding
-`VerifyingKey`. The `VerifyingKey` can be used to verify a signature by
-passing it both the data string and the signature byte string: it either
-returns `True` or raises `BadSignatureError`.
-
-```python
-from ecdsa import SigningKey
-sk = SigningKey.generate() # uses NIST192p
-vk = sk.verifying_key
-signature = sk.sign(b"message")
-assert vk.verify(signature, b"message")
-```
-
-Each `SigningKey`/`VerifyingKey` is associated with a specific curve, like
-NIST192p (the default one). Longer curves are more secure, but take longer to
-use, and result in longer keys and signatures.
-
-```python
-from ecdsa import SigningKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-vk = sk.verifying_key
-signature = sk.sign(b"message")
-assert vk.verify(signature, b"message")
-```
-
-The `SigningKey` can be serialized into several different formats: the
-shortest is to call `s = sk.to_string()`, and then re-create it with
-`SigningKey.from_string(s, curve)`. This short form does not record the
-curve, so you must be sure to pass to `from_string()` the same curve you
-used for the original key. The short form of a NIST192p-based signing key is
-just 24 bytes long. If a point encoding is invalid or the point does not lie
-on the specified curve, `from_string()` will raise `MalformedPointError`.
-
-```python
-from ecdsa import SigningKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-sk_string = sk.to_string()
-sk2 = SigningKey.from_string(sk_string, curve=NIST384p)
-print(sk_string.hex())
-print(sk2.to_string().hex())
-```
-
-Note: while the methods are called `to_string()`, the type they return is
-actually `bytes`; the "string" part is a leftover from Python 2.
-
-`sk.to_pem()` and `sk.to_der()` will serialize the signing key into the same
-formats that OpenSSL uses. The PEM file looks like the familiar ASCII-armored
-`"-----BEGIN EC PRIVATE KEY-----"` base64-encoded format, and the DER format
-is a shorter binary form of the same data.
-`SigningKey.from_pem()`/`.from_der()` will undo this serialization. These
-formats include the curve name, so you do not need to pass in a curve
-identifier to the deserializer. If the file is malformed, `from_der()`
-and `from_pem()` will raise `UnexpectedDER` or `MalformedPointError`.
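-
-A small sketch of catching such a parsing failure, feeding deliberately
-invalid DER bytes to the deserializer (illustrative only; real code would
-read the bytes from a file):
-
-```python
-from ecdsa import VerifyingKey, UnexpectedDER
-
-try:
-    # not a DER SEQUENCE, so parsing fails early
-    vk = VerifyingKey.from_der(b"\x00junk")
-except UnexpectedDER as e:
-    print("malformed key file:", e)
-```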
-
-```python
-from ecdsa import SigningKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-sk_pem = sk.to_pem()
-sk2 = SigningKey.from_pem(sk_pem)
-# sk and sk2 are the same key
-```
-
-Likewise, the `VerifyingKey` can be serialized in the same way:
-`vk.to_string()`/`VerifyingKey.from_string()`, `to_pem()`/`from_pem()`, and
-`to_der()`/`from_der()`. The same `curve=` argument is needed for
-`VerifyingKey.from_string()`.
-
-```python
-from ecdsa import SigningKey, VerifyingKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-vk = sk.verifying_key
-vk_string = vk.to_string()
-vk2 = VerifyingKey.from_string(vk_string, curve=NIST384p)
-# vk and vk2 are the same key
-
-from ecdsa import SigningKey, VerifyingKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-vk = sk.verifying_key
-vk_pem = vk.to_pem()
-vk2 = VerifyingKey.from_pem(vk_pem)
-# vk and vk2 are the same key
-```
-
-There are a couple of different ways to compute a signature. Fundamentally,
-ECDSA takes a number that represents the data being signed, and returns a
-pair of numbers that represent the signature. The `hashfunc=` argument to
-`sk.sign()` and `vk.verify()` is used to turn an arbitrary string into a
-fixed-length digest, which is then turned into a number that ECDSA can sign,
-and both sign and verify must use the same approach. The default value is
-`hashlib.sha1`, but if you use NIST256p or a longer curve, you can use
-`hashlib.sha256` instead.
-
-There are also multiple ways to represent a signature. The default
-`sk.sign()` and `vk.verify()` methods present it as a short string, for
-simplicity and minimal overhead. To use a different scheme, use the
-`sk.sign(sigencode=)` and `vk.verify(sigdecode=)` arguments. There are helper
-functions in the `ecdsa.util` module that can be useful here.
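-
-For example, a minimal sketch combining both options, signing with SHA-256
-on NIST256p and encoding the signature as DER with the
-`sigencode_der`/`sigdecode_der` helpers (any matching encode/decode pair
-from `ecdsa.util` would work the same way):
-
-```python
-import hashlib
-from ecdsa import SigningKey, NIST256p
-from ecdsa.util import sigencode_der, sigdecode_der
-
-sk = SigningKey.generate(curve=NIST256p)
-vk = sk.verifying_key
-# sign with SHA-256 and encode the signature as DER
-signature = sk.sign(b"message", hashfunc=hashlib.sha256,
-                    sigencode=sigencode_der)
-# verification must use the same hash function and the matching decoder
-assert vk.verify(signature, b"message", hashfunc=hashlib.sha256,
-                 sigdecode=sigdecode_der)
-```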
-
-It is also possible to create a `SigningKey` from a "seed", which is
-deterministic. This can be used in protocols where you want to derive
-consistent signing keys from some other secret, for example when you want
-three separate keys and only want to store a single master secret. You should
-start with a uniformly-distributed unguessable seed with about
-`curve.baselen` bytes of entropy, then use one of the helper functions in
-`ecdsa.util` to convert it into an integer in the correct range, and finally
-pass it into `SigningKey.from_secret_exponent()`, like this:
-
-```python
-import os
-from ecdsa import NIST384p, SigningKey
-from ecdsa.util import randrange_from_seed__trytryagain
-
-def make_key(seed):
-    secexp = randrange_from_seed__trytryagain(seed, NIST384p.order)
-    return SigningKey.from_secret_exponent(secexp, curve=NIST384p)
-
-seed = os.urandom(NIST384p.baselen) # or other starting point
-sk1a = make_key(seed)
-sk1b = make_key(seed)
-# note: sk1a and sk1b are the same key
-assert sk1a.to_string() == sk1b.to_string()
-sk2 = make_key(b"2-" + seed)  # different key
-assert sk1a.to_string() != sk2.to_string()
-```
-
-If the application will verify a lot of signatures made with a single key,
-it's possible to precompute some of the internal values to make signature
-verification significantly faster. The break-even point occurs at about 100
-signatures verified.
-
-To perform the precomputation, call the `precompute()` method
-on the `VerifyingKey` instance:
-```python
-from ecdsa import SigningKey, NIST384p
-sk = SigningKey.generate(curve=NIST384p)
-vk = sk.verifying_key
-vk.precompute()
-signature = sk.sign(b"message")
-assert vk.verify(signature, b"message")
-```
-
-Once `precompute()` has been called, all signature verifications with this
-key will be faster to execute.
-
-## OpenSSL Compatibility
-
-To produce signatures that can be verified by OpenSSL tools, or to verify
-signatures that were produced by those tools, use:
-
-```python
-# openssl ecparam -name prime256v1 -genkey -out sk.pem
-# openssl ec -in sk.pem -pubout -out vk.pem
-# echo "data for signing" > data
-# openssl dgst -sha256 -sign sk.pem -out data.sig data
-# openssl dgst -sha256 -verify vk.pem -signature data.sig data
-# openssl dgst -sha256 -prverify sk.pem -signature data.sig data
-
-import hashlib
-from ecdsa import SigningKey, VerifyingKey
-from ecdsa.util import sigencode_der, sigdecode_der
-
-with open("vk.pem") as f:
-    vk = VerifyingKey.from_pem(f.read())
-
-with open("data", "rb") as f:
-    data = f.read()
-
-with open("data.sig", "rb") as f:
-    signature = f.read()
-
-assert vk.verify(signature, data, hashlib.sha256, sigdecode=sigdecode_der)
-
-with open("sk.pem") as f:
-    sk = SigningKey.from_pem(f.read(), hashlib.sha256)
-
-new_signature = sk.sign_deterministic(data, sigencode=sigencode_der)
-
-with open("data.sig2", "wb") as f:
-    f.write(new_signature)
-
-# openssl dgst -sha256 -verify vk.pem -signature data.sig2 data
-```
-
-Note: if compatibility with OpenSSL 1.0.0 or earlier is necessary, use the
-`sigencode_string` and `sigdecode_string` functions from `ecdsa.util` for
-writing and reading the signatures, respectively.
-
-The keys can also be written in a format that OpenSSL can handle:
-
-```python
-from ecdsa import SigningKey, VerifyingKey
-
-with open("sk.pem") as f:
-    sk = SigningKey.from_pem(f.read())
-with open("sk.pem", "wb") as f:
-    f.write(sk.to_pem())
-
-with open("vk.pem") as f:
-    vk = VerifyingKey.from_pem(f.read())
-with open("vk.pem", "wb") as f:
-    f.write(vk.to_pem())
-```
-
-## Entropy
-
-Creating a signing key with `SigningKey.generate()` requires some form of
-entropy (as opposed to
-`from_secret_exponent`/`from_string`/`from_der`/`from_pem`,
-which are deterministic and do not require an entropy source). The default
-source is `os.urandom()`, but you can pass any other function that behaves
-like `os.urandom` as the `entropy=` argument to do something different. This
-may be useful in unit tests, where you want to achieve repeatable results.
-The `ecdsa.util.PRNG` utility is handy here: it takes a seed and produces a
-strong pseudo-random stream from it:
-
-```python
-from ecdsa.util import PRNG
-from ecdsa import SigningKey
-rng1 = PRNG(b"seed")
-sk1 = SigningKey.generate(entropy=rng1)
-rng2 = PRNG(b"seed")
-sk2 = SigningKey.generate(entropy=rng2)
-# sk1 and sk2 are the same key
-```
-
-Likewise, ECDSA signature generation requires a random number, and each
-signature must use a different one (using the same number twice will
-immediately reveal the private signing key). The `sk.sign()` method takes an
-`entropy=` argument which behaves the same as `SigningKey.generate(entropy=)`.
-
-## Deterministic Signatures
-
-If you call `SigningKey.sign_deterministic(data)` instead of `.sign(data)`,
-the code will generate a deterministic signature instead of a random one.
-This uses the algorithm from RFC 6979 to safely generate a unique `k` value,
-derived from the private key and the message being signed. Each time you sign
-the same message with the same key, you will get the same signature (using
-the same `k`).
-
-This may become the default in a future version, as it is not vulnerable to
-failures of the entropy source.
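-
-A minimal sketch of that behaviour (assuming NIST256p with SHA-256; any
-curve/hash combination works the same way):
-
-```python
-import hashlib
-from ecdsa import SigningKey, NIST256p
-
-sk = SigningKey.generate(curve=NIST256p)
-sig1 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
-sig2 = sk.sign_deterministic(b"message", hashfunc=hashlib.sha256)
-# same key, same message, same hash function => identical signatures
-assert sig1 == sig2
-```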
-
-## Examples
-
-Create a NIST192p key pair and immediately save both keys to disk:
-
-```python
-from ecdsa import SigningKey
-sk = SigningKey.generate()
-vk = sk.verifying_key
-with open("private.pem", "wb") as f:
-    f.write(sk.to_pem())
-with open("public.pem", "wb") as f:
-    f.write(vk.to_pem())
-```
-
-Load a signing key from disk, use it to sign a message (using SHA-1), and
-write the signature to disk:
-
-```python
-from ecdsa import SigningKey
-with open("private.pem") as f:
-    sk = SigningKey.from_pem(f.read())
-with open("message", "rb") as f:
-    message = f.read()
-sig = sk.sign(message)
-with open("signature", "wb") as f:
-    f.write(sig)
-```
-
-Load the verifying key, message, and signature from disk, and verify the
-signature (assuming a SHA-1 hash):
-
-```python
-from ecdsa import VerifyingKey, BadSignatureError
-with open("public.pem") as f:
-    vk = VerifyingKey.from_pem(f.read())
-with open("message", "rb") as f:
-    message = f.read()
-with open("signature", "rb") as f:
-    sig = f.read()
-try:
-    vk.verify(sig, message)
-    print("good signature")
-except BadSignatureError:
-    print("BAD SIGNATURE")
-```
-
-Create a NIST521p key pair:
-
-```python
-from ecdsa import SigningKey, NIST521p
-sk = SigningKey.generate(curve=NIST521p)
-vk = sk.verifying_key
-```
-
-Create three independent signing keys from a master seed:
-
-```python
-import os
-from ecdsa import NIST192p, SigningKey
-from ecdsa.util import randrange_from_seed__trytryagain
-
-def make_key_from_seed(seed, curve=NIST192p):
-    secexp = randrange_from_seed__trytryagain(seed, curve.order)
-    return SigningKey.from_secret_exponent(secexp, curve)
-
-seed = os.urandom(NIST192p.baselen)  # the single master secret
-sk1 = make_key_from_seed(b"1:" + seed)
-sk2 = make_key_from_seed(b"2:" + seed)
-sk3 = make_key_from_seed(b"3:" + seed)
-```
-
-Load a verifying key from disk and print it using hex encoding in
-uncompressed and compressed format (defined in the X9.62 and SEC1 standards):
-
-```python
-from ecdsa import VerifyingKey
-
-with open("public.pem") as f:
-    vk = VerifyingKey.from_pem(f.read())
-
-print("uncompressed: {0}".format(vk.to_string("uncompressed").hex()))
-print("compressed: {0}".format(vk.to_string("compressed").hex()))
-```
-
-Load a verifying key from a hex string in compressed format, and output it
-in uncompressed format:
-
-```python
-from ecdsa import VerifyingKey, NIST256p
-
-comp_str = '022799c0d0ee09772fdd337d4f28dc155581951d07082fb19a38aa396b67e77759'
-vk = VerifyingKey.from_string(bytearray.fromhex(comp_str), curve=NIST256p)
-print(vk.to_string("uncompressed").hex())
-```
-
-ECDH key exchange with a remote party:
-
-```python
-from ecdsa import ECDH, NIST256p
-
-ecdh = ECDH(curve=NIST256p)
-ecdh.generate_private_key()
-local_public_key = ecdh.get_public_key()
-# send `local_public_key` to the remote party and receive
-# `remote_public_key` from the remote party
-with open("remote_public_key.pem") as e:
-    remote_public_key = e.read()
-ecdh.load_received_public_key_pem(remote_public_key)
-secret = ecdh.generate_sharedsecret_bytes()
-```
diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD deleted file mode 100644 index 83c83066..00000000 ---
a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/RECORD +++ /dev/null @@ -1,66 +0,0 @@ -ecdsa-0.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -ecdsa-0.19.1.dist-info/LICENSE,sha256=PsqYRXc9LluMydjBGdNF8ApIBuS9Zg1KPWzfnA6di7I,1147 -ecdsa-0.19.1.dist-info/METADATA,sha256=pgYAeXZAWaSlZnvx6ZgK6RpQAbgo7nhRuptSt4T71hY,29641 -ecdsa-0.19.1.dist-info/RECORD,, -ecdsa-0.19.1.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110 -ecdsa-0.19.1.dist-info/top_level.txt,sha256=7ovPHfAPyTou19f8gOSbHm6B9dGjTibWolcCB7Zjovs,6 -ecdsa/__init__.py,sha256=wRUnU3g01MM-mLNemnov-_8B81DlbSrqoFtOO4bZzQQ,1931 -ecdsa/__pycache__/__init__.cpython-312.pyc,, -ecdsa/__pycache__/_compat.cpython-312.pyc,, -ecdsa/__pycache__/_rwlock.cpython-312.pyc,, -ecdsa/__pycache__/_sha3.cpython-312.pyc,, -ecdsa/__pycache__/_version.cpython-312.pyc,, -ecdsa/__pycache__/curves.cpython-312.pyc,, -ecdsa/__pycache__/der.cpython-312.pyc,, -ecdsa/__pycache__/ecdh.cpython-312.pyc,, -ecdsa/__pycache__/ecdsa.cpython-312.pyc,, -ecdsa/__pycache__/eddsa.cpython-312.pyc,, -ecdsa/__pycache__/ellipticcurve.cpython-312.pyc,, -ecdsa/__pycache__/errors.cpython-312.pyc,, -ecdsa/__pycache__/keys.cpython-312.pyc,, -ecdsa/__pycache__/numbertheory.cpython-312.pyc,, -ecdsa/__pycache__/rfc6979.cpython-312.pyc,, -ecdsa/__pycache__/ssh.cpython-312.pyc,, -ecdsa/__pycache__/test_curves.cpython-312.pyc,, -ecdsa/__pycache__/test_der.cpython-312.pyc,, -ecdsa/__pycache__/test_ecdh.cpython-312.pyc,, -ecdsa/__pycache__/test_ecdsa.cpython-312.pyc,, -ecdsa/__pycache__/test_eddsa.cpython-312.pyc,, -ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc,, -ecdsa/__pycache__/test_jacobi.cpython-312.pyc,, -ecdsa/__pycache__/test_keys.cpython-312.pyc,, -ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc,, -ecdsa/__pycache__/test_numbertheory.cpython-312.pyc,, -ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc,, -ecdsa/__pycache__/test_rw_lock.cpython-312.pyc,, -ecdsa/__pycache__/test_sha3.cpython-312.pyc,, -ecdsa/__pycache__/util.cpython-312.pyc,, -ecdsa/_compat.py,sha256=5EP735DlmaSb8Slk4BMnh9Z7MvYpTnGS3S_DBmHETwo,4047 -ecdsa/_rwlock.py,sha256=CAwHp2V65ksI8B1UqY7EccK9LaUToiv6pDLVzm44eag,2849 -ecdsa/_sha3.py,sha256=DJs7QLmdkQMU35llyD8HQeAXNvf5sMcujO6oFdScIqI,4747 -ecdsa/_version.py,sha256=6Q6w07SB50LWxDw7_U4KvjfFSQLiAZWHvYT_iDehYqw,498 -ecdsa/curves.py,sha256=EcPE0WRFkjpPMZfVM0-hRQ63CdW5GKTkDRpK-VOy1Zo,15975 -ecdsa/der.py,sha256=wXf8ftHJLdJZ2MjpQjeT5ji_qTj05dJtqCsvmwjvbuo,16444 -ecdsa/ecdh.py,sha256=Tiirawt5xegVDrY9eS-ATvvfmTIznUyv5fy2k7VnzTk,11011 -ecdsa/ecdsa.py,sha256=82ECyve36rL4_y8Zk8jPB-Uyb67Ppx6CYdqIeFniv7w,31699 -ecdsa/eddsa.py,sha256=IzsGzoGAefcoYjF7DVjFkX5ZJqiK2LTOmAMe6wyf4UU,7170 -ecdsa/ellipticcurve.py,sha256=OhoxJsJJ_7Dp5e10AblD_ui9Uo2WxozRby37gxqcuOk,54296 -ecdsa/errors.py,sha256=b4mhnmIpRnEdHzbectHAA5F7O9MtSaI-fYoc13_vBxQ,130 -ecdsa/keys.py,sha256=FkpubL9MGCfkCJ6aVvV5c0bve4ZuBtU3P1dRRBTth0k,65503 -ecdsa/numbertheory.py,sha256=Ad2-mVFaOytMomBC-7d0cJao4tpkVLh4x7vyqj73G6A,17831 -ecdsa/rfc6979.py,sha256=zwzo33lsZJA9r2dSf7HCliI_yIbw5cJ0Ek9tLdRRO40,2850 -ecdsa/ssh.py,sha256=360JY0dbYeZaqx9k_OhlHzPZYZTw5UB5xtK2XLBqb0M,1916 -ecdsa/test_curves.py,sha256=l5N-m4Yo5IAy4a8aJMsBamaSlLAfSoYjYqCj1HDEVpU,13081 -ecdsa/test_der.py,sha256=Bqbu3QiESyl8X9bjR2IBzDW35fDHTVB-mhzk5qXgGQQ,18915 -ecdsa/test_ecdh.py,sha256=20TEYyGcnynAPS9nlr_k2ed6S7lJ1Z0bbohNU-pEGco,15380 -ecdsa/test_ecdsa.py,sha256=V5Q4Q7uUFfStVdeFtwhXVpFlFi0Nx5J5uru9T3j_3DQ,25037 -ecdsa/test_eddsa.py,sha256=1jfHF_ZSyillv6DLKJZ0SeYThauTCHsyRNAweS_4MEQ,33720 
-ecdsa/test_ellipticcurve.py,sha256=mvIBD_KKrOY4Vpd4KcR3J8RCBxINJtqWls1WMEsFgnE,8798 -ecdsa/test_jacobi.py,sha256=SVrS--PSwDakCYit216khmzF8eQryWtiI3sYWd_6890,25811 -ecdsa/test_keys.py,sha256=reQ2KtfOsANa59CqvvRWjZm-SaWfscJZVJaE9RFyL94,39415 -ecdsa/test_malformed_sigs.py,sha256=Dg1Dkvgz1tO-KhZ6f9ZRljykz8FBYtld3hq1W2hLLM8,11289 -ecdsa/test_numbertheory.py,sha256=YTtclt_50n_24U__r2JyV3LEqR2BsBmA6IJghwBDz8c,13265 -ecdsa/test_pyecdsa.py,sha256=2zoWfR_xAqagKOFHVJt4dTZZacu1zT4JxXH_HJUABW8,91272 -ecdsa/test_rw_lock.py,sha256=byv0_FTM90cbuHPCI6__LeQJkHL_zYEeVYIBO8e2LLc,7021 -ecdsa/test_sha3.py,sha256=0PkWi7AnTJ10YNfDVqj2SB7adeTbV6DCvMg4n9ULad8,3042 -ecdsa/util.py,sha256=UixXH6PR9poMrWQJ93rKpUSVZHxP1AmLwlkrkI3FWFE,18006 diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL deleted file mode 100644 index c34f1162..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt deleted file mode 100644 index aa5efdb5..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa-0.19.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ecdsa diff --git a/venv/lib/python3.12/site-packages/ecdsa/__init__.py b/venv/lib/python3.12/site-packages/ecdsa/__init__.py deleted file mode 100644 index 342538e0..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# while we don't use six in this file, we did bundle it for a long time, so -# keep as part of module in a virtual way (through __all__) -import six -from .keys import ( - SigningKey, - VerifyingKey, - BadSignatureError, - BadDigestError, - MalformedPointError, -) -from .curves import ( - NIST192p, - NIST224p, - NIST256p, - NIST384p, - NIST521p, - SECP256k1, - BRAINPOOLP160r1, - BRAINPOOLP192r1, - BRAINPOOLP224r1, - BRAINPOOLP256r1, - BRAINPOOLP320r1, - BRAINPOOLP384r1, - BRAINPOOLP512r1, - SECP112r1, - SECP112r2, - SECP128r1, - SECP160r1, - Ed25519, - Ed448, - BRAINPOOLP160t1, - BRAINPOOLP192t1, - BRAINPOOLP224t1, - BRAINPOOLP256t1, - BRAINPOOLP320t1, - BRAINPOOLP384t1, - BRAINPOOLP512t1, -) -from .ecdh import ( - ECDH, - NoKeyError, - NoCurveError, - InvalidCurveError, - InvalidSharedSecretError, -) -from .der import UnexpectedDER -from . 
import _version - -# This code comes from http://github.com/tlsfuzzer/python-ecdsa -__all__ = [ - "curves", - "der", - "ecdsa", - "ellipticcurve", - "keys", - "numbertheory", - "test_pyecdsa", - "util", - "six", -] - -_hush_pyflakes = [ - SigningKey, - VerifyingKey, - BadSignatureError, - BadDigestError, - MalformedPointError, - UnexpectedDER, - InvalidCurveError, - NoKeyError, - InvalidSharedSecretError, - ECDH, - NoCurveError, - NIST192p, - NIST224p, - NIST256p, - NIST384p, - NIST521p, - SECP256k1, - BRAINPOOLP160r1, - BRAINPOOLP192r1, - BRAINPOOLP224r1, - BRAINPOOLP256r1, - BRAINPOOLP320r1, - BRAINPOOLP384r1, - BRAINPOOLP512r1, - SECP112r1, - SECP112r2, - SECP128r1, - SECP160r1, - Ed25519, - Ed448, - six.b(""), - BRAINPOOLP160t1, - BRAINPOOLP192t1, - BRAINPOOLP224t1, - BRAINPOOLP256t1, - BRAINPOOLP320t1, - BRAINPOOLP384t1, - BRAINPOOLP512t1, -] -del _hush_pyflakes - -__version__ = _version.get_versions()["version"] diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d8efe89b..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc deleted file mode 100644 index 9599b33d..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_compat.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc deleted file mode 100644 index 684b1f0d..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_rwlock.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc deleted file mode 100644 index a7a0d3af..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_sha3.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc deleted file mode 100644 index 2e1845f9..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/_version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc deleted file mode 100644 index c31c9d1e..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/curves.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc deleted file mode 100644 index 93eba4f6..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/der.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc deleted file mode 100644 index 23450b31..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdh.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc deleted file mode 100644 index 27a53294..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ecdsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc deleted file mode 100644 index fa9c6c08..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/eddsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc deleted file mode 100644 index a794f63c..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ellipticcurve.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc deleted file mode 100644 index 63543bf6..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/errors.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc deleted file mode 100644 index 0ce7c4fb..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/keys.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc deleted file mode 100644 index af81f397..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/numbertheory.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc deleted file mode 100644 index 84b19b92..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/rfc6979.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc deleted file mode 100644 index 886d8a32..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/ssh.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc deleted file mode 100644 index 3ce719a2..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_curves.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc deleted file mode 100644 index 0cf128c7..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_der.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc deleted file mode 100644 index ba118d00..00000000 Binary files 
a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdh.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc deleted file mode 100644 index b37236f7..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ecdsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc deleted file mode 100644 index ec78401f..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_eddsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc deleted file mode 100644 index 9d43c5af..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_ellipticcurve.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc deleted file mode 100644 index a60b2a7e..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_jacobi.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc deleted file mode 100644 index 78647008..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_keys.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc deleted file mode 100644 index 812124f8..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_malformed_sigs.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc deleted file mode 100644 index 665dc7ed..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_numbertheory.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc deleted file mode 100644 index a07dccf9..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_pyecdsa.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc deleted file mode 100644 index f3a29f4c..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_rw_lock.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc deleted file mode 100644 index b76bc46a..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/test_sha3.cpython-312.pyc and /dev/null 
differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc b/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc deleted file mode 100644 index 97bafa85..00000000 Binary files a/venv/lib/python3.12/site-packages/ecdsa/__pycache__/util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/ecdsa/_compat.py b/venv/lib/python3.12/site-packages/ecdsa/_compat.py deleted file mode 100644 index 4558e33a..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/_compat.py +++ /dev/null @@ -1,138 +0,0 @@ -""" -Common functions for providing cross-python version compatibility. -""" -import sys -import re -import binascii -from six import integer_types - - -def str_idx_as_int(string, index): - """Take index'th byte from string, return as integer""" - val = string[index] - if isinstance(val, integer_types): - return val - return ord(val) - - -if sys.version_info < (3, 0): # pragma: no branch - import platform - - def normalise_bytes(buffer_object): - """Cast the input into array of bytes.""" - # flake8 runs on py3 where `buffer` indeed doesn't exist... - return buffer(buffer_object) # noqa: F821 - - def hmac_compat(ret): - return ret - - if ( - sys.version_info < (2, 7) - or sys.version_info < (2, 7, 4) - or platform.system() == "Java" - ): # pragma: no branch - - def remove_whitespace(text): - """Removes all whitespace from passed in string""" - return re.sub(r"\s+", "", text) - - def compat26_str(val): - return str(val) - - def bit_length(val): - if val == 0: - return 0 - return len(bin(val)) - 2 - - else: - - def remove_whitespace(text): - """Removes all whitespace from passed in string""" - return re.sub(r"\s+", "", text, flags=re.UNICODE) - - def compat26_str(val): - return val - - def bit_length(val): - """Return number of bits necessary to represent an integer.""" - return val.bit_length() - - def b2a_hex(val): - return binascii.b2a_hex(compat26_str(val)) - - def a2b_hex(val): - try: - return bytearray(binascii.a2b_hex(val)) - except Exception as e: - raise ValueError("base16 error: %s" % e) - - def bytes_to_int(val, byteorder): - """Convert bytes to an int.""" - if not val: - return 0 - if byteorder == "big": - return int(b2a_hex(val), 16) - if byteorder == "little": - return int(b2a_hex(val[::-1]), 16) - raise ValueError("Only 'big' and 'little' endian supported") - - def int_to_bytes(val, length=None, byteorder="big"): - """Return number converted to bytes""" - if length is None: - length = byte_length(val) - if byteorder == "big": - return bytearray( - (val >> i) & 0xFF for i in reversed(range(0, length * 8, 8)) - ) - if byteorder == "little": - return bytearray( - (val >> i) & 0xFF for i in range(0, length * 8, 8) - ) - raise ValueError("Only 'big' or 'little' endian supported") - -else: - - def hmac_compat(data): - return data - - def normalise_bytes(buffer_object): - """Cast the input into array of bytes.""" - return memoryview(buffer_object).cast("B") - - def compat26_str(val): - return val - - def remove_whitespace(text): - """Removes all whitespace from passed in string""" - return re.sub(r"\s+", "", text, flags=re.UNICODE) - - def a2b_hex(val): - try: - return bytearray(binascii.a2b_hex(bytearray(val, "ascii"))) - except Exception as e: - raise ValueError("base16 error: %s" % e) - - # pylint: disable=invalid-name - # pylint is stupid here and doesn't notice it's a function, not - # constant - bytes_to_int = int.from_bytes - # pylint: enable=invalid-name - - def bit_length(val): - """Return number of 
bits necessary to represent an integer.""" - return val.bit_length() - - def int_to_bytes(val, length=None, byteorder="big"): - """Convert integer to bytes.""" - if length is None: - length = byte_length(val) - # for gmpy we need to convert back to native int - if not isinstance(val, int): - val = int(val) - return bytearray(val.to_bytes(length=length, byteorder=byteorder)) - - -def byte_length(val): - """Return number of bytes necessary to represent an integer.""" - length = bit_length(val) - return (length + 7) // 8 diff --git a/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py b/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py deleted file mode 100644 index 010e4981..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/_rwlock.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Mateusz Kobos, (c) 2011 -# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ -# released under the MIT licence - -import threading - - -__author__ = "Mateusz Kobos" - - -class RWLock: - """ - Read-Write locking primitive - - Synchronization object used in a solution of so-called second - readers-writers problem. In this problem, many readers can simultaneously - access a share, and a writer has an exclusive access to this share. - Additionally, the following constraints should be met: - 1) no reader should be kept waiting if the share is currently opened for - reading unless a writer is also waiting for the share, - 2) no writer should be kept waiting for the share longer than absolutely - necessary. - - The implementation is based on [1, secs. 4.2.2, 4.2.6, 4.2.7] - with a modification -- adding an additional lock (C{self.__readers_queue}) - -- in accordance with [2]. - - Sources: - [1] A.B. Downey: "The little book of semaphores", Version 2.1.5, 2008 - [2] P.J. Courtois, F. Heymans, D.L. Parnas: - "Concurrent Control with 'Readers' and 'Writers'", - Communications of the ACM, 1971 (via [3]) - [3] http://en.wikipedia.org/wiki/Readers-writers_problem - """ - - def __init__(self): - """ - A lock giving an even higher priority to the writer in certain - cases (see [2] for a discussion). - """ - self.__read_switch = _LightSwitch() - self.__write_switch = _LightSwitch() - self.__no_readers = threading.Lock() - self.__no_writers = threading.Lock() - self.__readers_queue = threading.Lock() - - def reader_acquire(self): - self.__readers_queue.acquire() - self.__no_readers.acquire() - self.__read_switch.acquire(self.__no_writers) - self.__no_readers.release() - self.__readers_queue.release() - - def reader_release(self): - self.__read_switch.release(self.__no_writers) - - def writer_acquire(self): - self.__write_switch.acquire(self.__no_readers) - self.__no_writers.acquire() - - def writer_release(self): - self.__no_writers.release() - self.__write_switch.release(self.__no_readers) - - -class _LightSwitch: - """An auxiliary "light switch"-like object. The first thread turns on the - "switch", the last one turns it off (see [1, sec. 
4.2.2] for details).""" - - def __init__(self): - self.__counter = 0 - self.__mutex = threading.Lock() - - def acquire(self, lock): - self.__mutex.acquire() - self.__counter += 1 - if self.__counter == 1: - lock.acquire() - self.__mutex.release() - - def release(self, lock): - self.__mutex.acquire() - self.__counter -= 1 - if self.__counter == 0: - lock.release() - self.__mutex.release() diff --git a/venv/lib/python3.12/site-packages/ecdsa/_sha3.py b/venv/lib/python3.12/site-packages/ecdsa/_sha3.py deleted file mode 100644 index 2db00586..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/_sha3.py +++ /dev/null @@ -1,181 +0,0 @@ -""" -Implementation of the SHAKE-256 algorithm for Ed448 -""" - -try: - import hashlib - - hashlib.new("shake256").digest(64) - - def shake_256(msg, outlen): - return hashlib.new("shake256", msg).digest(outlen) - -except (TypeError, ValueError): - - from ._compat import bytes_to_int, int_to_bytes - - # From little endian. - def _from_le(s): - return bytes_to_int(s, byteorder="little") - - # Rotate a word x by b places to the left. - def _rol(x, b): - return ((x << b) | (x >> (64 - b))) & (2**64 - 1) - - # Do the SHA-3 state transform on state s. - def _sha3_transform(s): - ROTATIONS = [ - 0, - 1, - 62, - 28, - 27, - 36, - 44, - 6, - 55, - 20, - 3, - 10, - 43, - 25, - 39, - 41, - 45, - 15, - 21, - 8, - 18, - 2, - 61, - 56, - 14, - ] - PERMUTATION = [ - 1, - 6, - 9, - 22, - 14, - 20, - 2, - 12, - 13, - 19, - 23, - 15, - 4, - 24, - 21, - 8, - 16, - 5, - 3, - 18, - 17, - 11, - 7, - 10, - ] - RC = [ - 0x0000000000000001, - 0x0000000000008082, - 0x800000000000808A, - 0x8000000080008000, - 0x000000000000808B, - 0x0000000080000001, - 0x8000000080008081, - 0x8000000000008009, - 0x000000000000008A, - 0x0000000000000088, - 0x0000000080008009, - 0x000000008000000A, - 0x000000008000808B, - 0x800000000000008B, - 0x8000000000008089, - 0x8000000000008003, - 0x8000000000008002, - 0x8000000000000080, - 0x000000000000800A, - 0x800000008000000A, - 0x8000000080008081, - 0x8000000000008080, - 0x0000000080000001, - 0x8000000080008008, - ] - - for rnd in range(0, 24): - # AddColumnParity (Theta) - c = [0] * 5 - d = [0] * 5 - for i in range(0, 25): - c[i % 5] ^= s[i] - for i in range(0, 5): - d[i] = c[(i + 4) % 5] ^ _rol(c[(i + 1) % 5], 1) - for i in range(0, 25): - s[i] ^= d[i % 5] - # RotateWords (Rho) - for i in range(0, 25): - s[i] = _rol(s[i], ROTATIONS[i]) - # PermuteWords (Pi) - t = s[PERMUTATION[0]] - for i in range(0, len(PERMUTATION) - 1): - s[PERMUTATION[i]] = s[PERMUTATION[i + 1]] - s[PERMUTATION[-1]] = t - # NonlinearMixRows (Chi) - for i in range(0, 25, 5): - t = [ - s[i], - s[i + 1], - s[i + 2], - s[i + 3], - s[i + 4], - s[i], - s[i + 1], - ] - for j in range(0, 5): - s[i + j] = t[j] ^ ((~t[j + 1]) & (t[j + 2])) - # AddRoundConstant (Iota) - s[0] ^= RC[rnd] - - # Reinterpret octet array b to word array and XOR it to state s. - def _reinterpret_to_words_and_xor(s, b): - for j in range(0, len(b) // 8): - s[j] ^= _from_le(b[8 * j : 8 * j + 8]) - - # Reinterpret word array w to octet array and return it. - def _reinterpret_to_octets(w): - mp = bytearray() - for j in range(0, len(w)): - mp += int_to_bytes(w[j], 8, byteorder="little") - return mp - - def _sha3_raw(msg, r_w, o_p, e_b): - """Semi-generic SHA-3 implementation""" - r_b = 8 * r_w - s = [0] * 25 - # Handle whole blocks. - idx = 0 - blocks = len(msg) // r_b - for i in range(0, blocks): - _reinterpret_to_words_and_xor(s, msg[idx : idx + r_b]) - idx += r_b - _sha3_transform(s) - # Handle last block padding. 
- m = bytearray(msg[idx:]) - m.append(o_p) - while len(m) < r_b: - m.append(0) - m[len(m) - 1] |= 128 - # Handle padded last block. - _reinterpret_to_words_and_xor(s, m) - _sha3_transform(s) - # Output. - out = bytearray() - while len(out) < e_b: - out += _reinterpret_to_octets(s[:r_w]) - _sha3_transform(s) - return out[:e_b] - - def shake_256(msg, outlen): - return _sha3_raw(msg, 17, 31, outlen) diff --git a/venv/lib/python3.12/site-packages/ecdsa/_version.py b/venv/lib/python3.12/site-packages/ecdsa/_version.py deleted file mode 100644 index d910ff93..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/_version.py +++ /dev/null @@ -1,21 +0,0 @@ - -# This file was generated by 'versioneer.py' (0.21) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -{ - "date": "2025-03-13T12:48:15+0100", - "dirty": false, - "error": null, - "full-revisionid": "2a6593d840ad153a16ebdd4f9b772b290494f3e3", - "version": "0.19.1" -} -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) diff --git a/venv/lib/python3.12/site-packages/ecdsa/curves.py b/venv/lib/python3.12/site-packages/ecdsa/curves.py deleted file mode 100644 index 38e3a758..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/curves.py +++ /dev/null @@ -1,590 +0,0 @@ -from __future__ import division - -from six import PY2 -from . import der, ecdsa, ellipticcurve, eddsa -from .util import orderlen, number_to_string, string_to_number -from ._compat import normalise_bytes, bit_length - - -# orderlen was defined in this module previously, so keep it in __all__, -# will need to mark it as deprecated later -__all__ = [ - "UnknownCurveError", - "orderlen", - "Curve", - "SECP112r1", - "SECP112r2", - "SECP128r1", - "SECP160r1", - "NIST192p", - "NIST224p", - "NIST256p", - "NIST384p", - "NIST521p", - "curves", - "find_curve", - "curve_by_name", - "SECP256k1", - "BRAINPOOLP160r1", - "BRAINPOOLP160t1", - "BRAINPOOLP192r1", - "BRAINPOOLP192t1", - "BRAINPOOLP224r1", - "BRAINPOOLP224t1", - "BRAINPOOLP256r1", - "BRAINPOOLP256t1", - "BRAINPOOLP320r1", - "BRAINPOOLP320t1", - "BRAINPOOLP384r1", - "BRAINPOOLP384t1", - "BRAINPOOLP512r1", - "BRAINPOOLP512t1", - "PRIME_FIELD_OID", - "CHARACTERISTIC_TWO_FIELD_OID", - "Ed25519", - "Ed448", -] - - -PRIME_FIELD_OID = (1, 2, 840, 10045, 1, 1) -CHARACTERISTIC_TWO_FIELD_OID = (1, 2, 840, 10045, 1, 2) - - -class UnknownCurveError(Exception): - pass - - -class Curve: - def __init__(self, name, curve, generator, oid, openssl_name=None): - self.name = name - self.openssl_name = openssl_name # maybe None - self.curve = curve - self.generator = generator - self.order = generator.order() - if isinstance(curve, ellipticcurve.CurveEdTw): - # EdDSA keys are special in that both private and public - # are the same size (as it's defined only with compressed points) - - # +1 for the sign bit and then round up - self.baselen = (bit_length(curve.p()) + 1 + 7) // 8 - self.verifying_key_length = self.baselen - else: - self.baselen = orderlen(self.order) - self.verifying_key_length = 2 * orderlen(curve.p()) - self.signature_length = 2 * self.baselen - self.oid = oid - if oid: - self.encoded_oid = der.encode_oid(*oid) - - def __eq__(self, other): - if isinstance(other, Curve): - return ( - self.curve == other.curve and self.generator == other.generator - ) - return NotImplemented - - def __ne__(self, other): - return not self == other - - def 
__repr__(self): - return self.name - - def to_der(self, encoding=None, point_encoding="uncompressed"): - """Serialise the curve parameters to binary string. - - :param str encoding: the format to save the curve parameters in. - Default is ``named_curve``, with fallback being the ``explicit`` - if the OID is not set for the curve. - :param str point_encoding: the point encoding of the generator when - explicit curve encoding is used. Ignored for ``named_curve`` - format. - - :return: DER encoded ECParameters structure - :rtype: bytes - """ - if encoding is None: - if self.oid: - encoding = "named_curve" - else: - encoding = "explicit" - - if encoding not in ("named_curve", "explicit"): - raise ValueError( - "Only 'named_curve' and 'explicit' encodings supported" - ) - - if encoding == "named_curve": - if not self.oid: - raise UnknownCurveError( - "Can't encode curve using named_curve encoding without " - "associated curve OID" - ) - return der.encode_oid(*self.oid) - elif isinstance(self.curve, ellipticcurve.CurveEdTw): - assert encoding == "explicit" - raise UnknownCurveError( - "Twisted Edwards curves don't support explicit encoding" - ) - - # encode the ECParameters sequence - curve_p = self.curve.p() - version = der.encode_integer(1) - field_id = der.encode_sequence( - der.encode_oid(*PRIME_FIELD_OID), der.encode_integer(curve_p) - ) - curve = der.encode_sequence( - der.encode_octet_string( - number_to_string(self.curve.a() % curve_p, curve_p) - ), - der.encode_octet_string( - number_to_string(self.curve.b() % curve_p, curve_p) - ), - ) - base = der.encode_octet_string(self.generator.to_bytes(point_encoding)) - order = der.encode_integer(self.generator.order()) - seq_elements = [version, field_id, curve, base, order] - if self.curve.cofactor(): - cofactor = der.encode_integer(self.curve.cofactor()) - seq_elements.append(cofactor) - - return der.encode_sequence(*seq_elements) - - def to_pem(self, encoding=None, point_encoding="uncompressed"): - """ - Serialise the curve parameters to the :term:`PEM` format. - - :param str encoding: the format to save the curve parameters in. - Default is ``named_curve``, with fallback being the ``explicit`` - if the OID is not set for the curve. - :param str point_encoding: the point encoding of the generator when - explicit curve encoding is used. Ignored for ``named_curve`` - format. - - :return: PEM encoded ECParameters structure - :rtype: str - """ - return der.topem( - self.to_der(encoding, point_encoding), "EC PARAMETERS" - ) - - @staticmethod - def from_der(data, valid_encodings=None): - """Decode the curve parameters from DER file. 
- - :param data: the binary string to decode the parameters from - :type data: :term:`bytes-like object` - :param valid_encodings: set of names of allowed encodings, by default - all (set by passing ``None``), supported ones are ``named_curve`` - and ``explicit`` - :type valid_encodings: :term:`set-like object` - """ - if not valid_encodings: - valid_encodings = set(("named_curve", "explicit")) - if not all(i in ["named_curve", "explicit"] for i in valid_encodings): - raise ValueError( - "Only named_curve and explicit encodings supported" - ) - data = normalise_bytes(data) - if not der.is_sequence(data): - if "named_curve" not in valid_encodings: - raise der.UnexpectedDER( - "named_curve curve parameters not allowed" - ) - oid, empty = der.remove_object(data) - if empty: - raise der.UnexpectedDER("Unexpected data after OID") - return find_curve(oid) - - if "explicit" not in valid_encodings: - raise der.UnexpectedDER("explicit curve parameters not allowed") - - seq, empty = der.remove_sequence(data) - if empty: - raise der.UnexpectedDER( - "Unexpected data after ECParameters structure" - ) - # decode the ECParameters sequence - version, rest = der.remove_integer(seq) - if version != 1: - raise der.UnexpectedDER("Unknown parameter encoding format") - field_id, rest = der.remove_sequence(rest) - curve, rest = der.remove_sequence(rest) - base_bytes, rest = der.remove_octet_string(rest) - order, rest = der.remove_integer(rest) - cofactor = None - if rest: - # the ASN.1 specification of ECParameters allows for future - # extensions of the sequence, so ignore the remaining bytes - cofactor, _ = der.remove_integer(rest) - - # decode the ECParameters.fieldID sequence - field_type, rest = der.remove_object(field_id) - if field_type == CHARACTERISTIC_TWO_FIELD_OID: - raise UnknownCurveError("Characteristic 2 curves unsupported") - if field_type != PRIME_FIELD_OID: - raise UnknownCurveError( - "Unknown field type: {0}".format(field_type) - ) - prime, empty = der.remove_integer(rest) - if empty: - raise der.UnexpectedDER( - "Unexpected data after ECParameters.fieldID.Prime-p element" - ) - - # decode the ECParameters.curve sequence - curve_a_bytes, rest = der.remove_octet_string(curve) - curve_b_bytes, rest = der.remove_octet_string(rest) - # seed can be defined here, but we don't parse it, so ignore `rest` - - curve_a = string_to_number(curve_a_bytes) - curve_b = string_to_number(curve_b_bytes) - - curve_fp = ellipticcurve.CurveFp(prime, curve_a, curve_b, cofactor) - - # decode the ECParameters.base point - - base = ellipticcurve.PointJacobi.from_bytes( - curve_fp, - base_bytes, - valid_encodings=("uncompressed", "compressed", "hybrid"), - order=order, - generator=True, - ) - tmp_curve = Curve("unknown", curve_fp, base, None) - - # if the curve matches one of the well-known ones, use the well-known - # one in preference, as it will have the OID and name associated - for i in curves: - if tmp_curve == i: - return i - return tmp_curve - - @classmethod - def from_pem(cls, string, valid_encodings=None): - """Decode the curve parameters from PEM file. 
- - :param str string: the text string to decode the parameters from - :param valid_encodings: set of names of allowed encodings, by default - all (set by passing ``None``), supported ones are ``named_curve`` - and ``explicit`` - :type valid_encodings: :term:`set-like object` - """ - if not PY2 and isinstance(string, str): # pragma: no branch - string = string.encode() - - ec_param_index = string.find(b"-----BEGIN EC PARAMETERS-----") - if ec_param_index == -1: - raise der.UnexpectedDER("EC PARAMETERS PEM header not found") - - return cls.from_der( - der.unpem(string[ec_param_index:]), valid_encodings - ) - - -# the SEC curves -SECP112r1 = Curve( - "SECP112r1", - ecdsa.curve_112r1, - ecdsa.generator_112r1, - (1, 3, 132, 0, 6), - "secp112r1", -) - - -SECP112r2 = Curve( - "SECP112r2", - ecdsa.curve_112r2, - ecdsa.generator_112r2, - (1, 3, 132, 0, 7), - "secp112r2", -) - - -SECP128r1 = Curve( - "SECP128r1", - ecdsa.curve_128r1, - ecdsa.generator_128r1, - (1, 3, 132, 0, 28), - "secp128r1", -) - - -SECP160r1 = Curve( - "SECP160r1", - ecdsa.curve_160r1, - ecdsa.generator_160r1, - (1, 3, 132, 0, 8), - "secp160r1", -) - - -# the NIST curves -NIST192p = Curve( - "NIST192p", - ecdsa.curve_192, - ecdsa.generator_192, - (1, 2, 840, 10045, 3, 1, 1), - "prime192v1", -) - - -NIST224p = Curve( - "NIST224p", - ecdsa.curve_224, - ecdsa.generator_224, - (1, 3, 132, 0, 33), - "secp224r1", -) - - -NIST256p = Curve( - "NIST256p", - ecdsa.curve_256, - ecdsa.generator_256, - (1, 2, 840, 10045, 3, 1, 7), - "prime256v1", -) - - -NIST384p = Curve( - "NIST384p", - ecdsa.curve_384, - ecdsa.generator_384, - (1, 3, 132, 0, 34), - "secp384r1", -) - - -NIST521p = Curve( - "NIST521p", - ecdsa.curve_521, - ecdsa.generator_521, - (1, 3, 132, 0, 35), - "secp521r1", -) - - -SECP256k1 = Curve( - "SECP256k1", - ecdsa.curve_secp256k1, - ecdsa.generator_secp256k1, - (1, 3, 132, 0, 10), - "secp256k1", -) - - -BRAINPOOLP160r1 = Curve( - "BRAINPOOLP160r1", - ecdsa.curve_brainpoolp160r1, - ecdsa.generator_brainpoolp160r1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), - "brainpoolP160r1", -) - - -BRAINPOOLP160t1 = Curve( - "BRAINPOOLP160t1", - ecdsa.curve_brainpoolp160t1, - ecdsa.generator_brainpoolp160t1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 2), - "brainpoolP160t1", -) - - -BRAINPOOLP192r1 = Curve( - "BRAINPOOLP192r1", - ecdsa.curve_brainpoolp192r1, - ecdsa.generator_brainpoolp192r1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 3), - "brainpoolP192r1", -) - - -BRAINPOOLP192t1 = Curve( - "BRAINPOOLP192t1", - ecdsa.curve_brainpoolp192t1, - ecdsa.generator_brainpoolp192t1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 4), - "brainpoolP192t1", -) - - -BRAINPOOLP224r1 = Curve( - "BRAINPOOLP224r1", - ecdsa.curve_brainpoolp224r1, - ecdsa.generator_brainpoolp224r1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 5), - "brainpoolP224r1", -) - - -BRAINPOOLP224t1 = Curve( - "BRAINPOOLP224t1", - ecdsa.curve_brainpoolp224t1, - ecdsa.generator_brainpoolp224t1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 6), - "brainpoolP224t1", -) - - -BRAINPOOLP256r1 = Curve( - "BRAINPOOLP256r1", - ecdsa.curve_brainpoolp256r1, - ecdsa.generator_brainpoolp256r1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 7), - "brainpoolP256r1", -) - - -BRAINPOOLP256t1 = Curve( - "BRAINPOOLP256t1", - ecdsa.curve_brainpoolp256t1, - ecdsa.generator_brainpoolp256t1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 8), - "brainpoolP256t1", -) - - -BRAINPOOLP320r1 = Curve( - "BRAINPOOLP320r1", - ecdsa.curve_brainpoolp320r1, - ecdsa.generator_brainpoolp320r1, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 9), - "brainpoolP320r1", -) - - -BRAINPOOLP320t1 = Curve( - "BRAINPOOLP320t1", - 
ecdsa.curve_brainpoolp320t1,
-    ecdsa.generator_brainpoolp320t1,
-    (1, 3, 36, 3, 3, 2, 8, 1, 1, 10),
-    "brainpoolP320t1",
-)
-
-
-BRAINPOOLP384r1 = Curve(
-    "BRAINPOOLP384r1",
-    ecdsa.curve_brainpoolp384r1,
-    ecdsa.generator_brainpoolp384r1,
-    (1, 3, 36, 3, 3, 2, 8, 1, 1, 11),
-    "brainpoolP384r1",
-)
-
-
-BRAINPOOLP384t1 = Curve(
-    "BRAINPOOLP384t1",
-    ecdsa.curve_brainpoolp384t1,
-    ecdsa.generator_brainpoolp384t1,
-    (1, 3, 36, 3, 3, 2, 8, 1, 1, 12),
-    "brainpoolP384t1",
-)
-
-
-BRAINPOOLP512r1 = Curve(
-    "BRAINPOOLP512r1",
-    ecdsa.curve_brainpoolp512r1,
-    ecdsa.generator_brainpoolp512r1,
-    (1, 3, 36, 3, 3, 2, 8, 1, 1, 13),
-    "brainpoolP512r1",
-)
-
-
-BRAINPOOLP512t1 = Curve(
-    "BRAINPOOLP512t1",
-    ecdsa.curve_brainpoolp512t1,
-    ecdsa.generator_brainpoolp512t1,
-    (1, 3, 36, 3, 3, 2, 8, 1, 1, 14),
-    "brainpoolP512t1",
-)
-
-
-Ed25519 = Curve(
-    "Ed25519",
-    eddsa.curve_ed25519,
-    eddsa.generator_ed25519,
-    (1, 3, 101, 112),
-)
-
-
-Ed448 = Curve(
-    "Ed448",
-    eddsa.curve_ed448,
-    eddsa.generator_ed448,
-    (1, 3, 101, 113),
-)
-
-
-# no order in particular, but keep previously added curves first
-curves = [
-    NIST192p,
-    NIST224p,
-    NIST256p,
-    NIST384p,
-    NIST521p,
-    SECP256k1,
-    BRAINPOOLP160r1,
-    BRAINPOOLP192r1,
-    BRAINPOOLP224r1,
-    BRAINPOOLP256r1,
-    BRAINPOOLP320r1,
-    BRAINPOOLP384r1,
-    BRAINPOOLP512r1,
-    SECP112r1,
-    SECP112r2,
-    SECP128r1,
-    SECP160r1,
-    Ed25519,
-    Ed448,
-    BRAINPOOLP160t1,
-    BRAINPOOLP192t1,
-    BRAINPOOLP224t1,
-    BRAINPOOLP256t1,
-    BRAINPOOLP320t1,
-    BRAINPOOLP384t1,
-    BRAINPOOLP512t1,
-]
-
-
-def find_curve(oid_curve):
-    """Select a curve based on its OID.
-
-    :param tuple[int,...] oid_curve: ASN.1 Object Identifier of the
-        curve to return, like ``(1, 2, 840, 10045, 3, 1, 7)`` for
-        ``NIST256p``.
-
-    :raises UnknownCurveError: When the oid doesn't match any of the
-        supported curves
-
-    :rtype: ~ecdsa.curves.Curve
-    """
-    for c in curves:
-        if c.oid == oid_curve:
-            return c
-    raise UnknownCurveError(
-        "I don't know about the curve with oid %s. "
-        "I only know about these: %s" % (oid_curve, [c.name for c in curves])
-    )
-
-
-def curve_by_name(name):
-    """Select a curve based on its name.
-
-    Returns the :py:class:`~ecdsa.curves.Curve` object with the given
-    ``name``. Note that ``name`` is case-sensitive.
-
-    :param str name: Name of the curve to return, like ``NIST256p`` or
-        ``prime256v1``
-
-    :raises UnknownCurveError: When the name doesn't match any of the
-        supported curves
-
-    :rtype: ~ecdsa.curves.Curve
-    """
-    for c in curves:
-        if name == c.name or (c.openssl_name and name == c.openssl_name):
-            return c
-    raise UnknownCurveError(
-        "Curve with name {0!r} unknown, only curves supported: {1}".format(
-            name, [c.name for c in curves]
-        )
-    )
diff --git a/venv/lib/python3.12/site-packages/ecdsa/der.py b/venv/lib/python3.12/site-packages/ecdsa/der.py
deleted file mode 100644
index 7a06b681..00000000
--- a/venv/lib/python3.12/site-packages/ecdsa/der.py
+++ /dev/null
@@ -1,478 +0,0 @@
-from __future__ import division
-
-import binascii
-import base64
-import warnings
-from itertools import chain
-from six import int2byte, text_type
-from ._compat import compat26_str, str_idx_as_int
-
-
-class UnexpectedDER(Exception):
-    pass
-
-
-def encode_constructed(tag, value):
-    return int2byte(0xA0 + tag) + encode_length(len(value)) + value
-
-
-def encode_implicit(tag, value, cls="context-specific"):
-    """
-    Encode an IMPLICIT value using :term:`DER`.
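# A quick sketch of the lookup helpers from curves.py above: the native name,
# the OpenSSL alias, and the OID all resolve to the same Curve object.
from ecdsa.curves import NIST256p, curve_by_name, find_curve

assert curve_by_name("NIST256p") is NIST256p
assert curve_by_name("prime256v1") is NIST256p
assert find_curve((1, 2, 840, 10045, 3, 1, 7)) is NIST256p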
-
-    :param int tag: the tag value to encode, must be between 0 and 31
-        inclusive
-    :param bytes value: the data to encode
-    :param str cls: the class of the tag to encode: "application",
-        "context-specific", or "private"
-    :rtype: bytes
-    """
-    if cls not in ("application", "context-specific", "private"):
-        raise ValueError("invalid tag class")
-    if tag > 31:
-        raise ValueError("Long tags not supported")
-
-    if cls == "application":
-        tag_class = 0b01000000
-    elif cls == "context-specific":
-        tag_class = 0b10000000
-    else:
-        assert cls == "private"
-        tag_class = 0b11000000
-
-    return int2byte(tag_class + tag) + encode_length(len(value)) + value
-
-
-def encode_integer(r):
-    assert r >= 0  # can't support negative numbers yet
-    h = ("%x" % r).encode()
-    if len(h) % 2:
-        h = b"0" + h
-    s = binascii.unhexlify(h)
-    num = str_idx_as_int(s, 0)
-    if num <= 0x7F:
-        return b"\x02" + encode_length(len(s)) + s
-    else:
-        # DER integers are two's complement, so if the first byte is
-        # 0x80-0xff then we need an extra 0x00 byte to prevent it from
-        # looking negative.
-        return b"\x02" + encode_length(len(s) + 1) + b"\x00" + s
-
-
-# sentry object to check if an argument was specified (used to detect
-# deprecated calling convention)
-_sentry = object()
-
-
-def encode_bitstring(s, unused=_sentry):
-    """
-    Encode a binary string as a BIT STRING using :term:`DER` encoding.
-
-    Note: because there is no native Python object that can encode an actual
-    bit string, this function only accepts byte strings as the `s` argument.
-    The byte string is the actual bit string that will be encoded, padded
-    on the right (the least significant bits, from a big-endian perspective)
-    to the first full byte. If the bit string has a bit length that is a
-    multiple of 8, then the padding should not be included. For correct DER
-    encoding the padding bits MUST be set to 0.
-
-    The number of padding bits needs to be provided as the `unused`
-    parameter. If it is specified as None, the number of unused bits
-    is assumed to be already encoded in the string as the first byte.
-
-    The deprecated calling convention specifies just the `s` parameter and
-    encodes the number of unused bits as the first byte of `s` (the same
-    convention as with None).
-
-    An empty string must be encoded with `unused` specified as 0.
-
-    A future version of python-ecdsa will make specifying the `unused`
-    argument mandatory.
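# A small sketch of the padding rules described above: b"\xf0" carries four
# meaningful bits, so the four low-order bits are unused, must be zero, and
# their count is emitted as the first content byte of the BIT STRING.
from ecdsa import der

assert der.encode_bitstring(b"\xf0", 4) == b"\x03\x02\x04\xf0"
assert der.encode_bitstring(b"", 0) == b"\x03\x01\x00"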
- - :param s: bytes to encode - :type s: bytes like object - :param unused: number of bits at the end of `s` that are unused, must be - between 0 and 7 (inclusive) - :type unused: int or None - - :raises ValueError: when `unused` is too large or too small - - :return: `s` encoded using DER - :rtype: bytes - """ - encoded_unused = b"" - len_extra = 0 - if unused is _sentry: - warnings.warn( - "Legacy call convention used, unused= needs to be specified", - DeprecationWarning, - ) - elif unused is not None: - if not 0 <= unused <= 7: - raise ValueError("unused must be integer between 0 and 7") - if unused: - if not s: - raise ValueError("unused is non-zero but s is empty") - last = str_idx_as_int(s, -1) - if last & (2**unused - 1): - raise ValueError("unused bits must be zeros in DER") - encoded_unused = int2byte(unused) - len_extra = 1 - return b"\x03" + encode_length(len(s) + len_extra) + encoded_unused + s - - -def encode_octet_string(s): - return b"\x04" + encode_length(len(s)) + s - - -def encode_oid(first, second, *pieces): - assert 0 <= first < 2 and 0 <= second <= 39 or first == 2 and 0 <= second - body = b"".join( - chain( - [encode_number(40 * first + second)], - (encode_number(p) for p in pieces), - ) - ) - return b"\x06" + encode_length(len(body)) + body - - -def encode_sequence(*encoded_pieces): - total_len = sum([len(p) for p in encoded_pieces]) - return b"\x30" + encode_length(total_len) + b"".join(encoded_pieces) - - -def encode_number(n): - b128_digits = [] - while n: - b128_digits.insert(0, (n & 0x7F) | 0x80) - n = n >> 7 - if not b128_digits: - b128_digits.append(0) - b128_digits[-1] &= 0x7F - return b"".join([int2byte(d) for d in b128_digits]) - - -def is_sequence(string): - return string and string[:1] == b"\x30" - - -def remove_constructed(string): - s0 = str_idx_as_int(string, 0) - if (s0 & 0xE0) != 0xA0: - raise UnexpectedDER( - "wanted type 'constructed tag' (0xa0-0xbf), got 0x%02x" % s0 - ) - tag = s0 & 0x1F - length, llen = read_length(string[1:]) - body = string[1 + llen : 1 + llen + length] - rest = string[1 + llen + length :] - return tag, body, rest - - -def remove_implicit(string, exp_class="context-specific"): - """ - Removes an IMPLICIT tagged value from ``string`` following :term:`DER`. - - :param bytes string: a byte string that can have one or more - DER elements. - :param str exp_class: the expected tag class of the implicitly - encoded value. Possible values are: "context-specific", "application", - and "private". 
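# A round-trip sketch with the SEQUENCE/INTEGER primitives in this module;
# an Ecdsa-Sig-Value is exactly such a SEQUENCE of two INTEGERs.
from ecdsa import der

encoded = der.encode_sequence(der.encode_integer(1), der.encode_integer(255))
body, rest = der.remove_sequence(encoded)
assert rest == b""
r, body = der.remove_integer(body)
s, body = der.remove_integer(body)
assert (r, s, body) == (1, 255, b"")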
- :return: a tuple with first value being the tag without indicator bits, - second being the raw bytes of the value and the third one being - remaining bytes (or an empty string if there are none) - :rtype: tuple(int,bytes,bytes) - """ - if exp_class not in ("context-specific", "application", "private"): - raise ValueError("invalid `exp_class` value") - if exp_class == "application": - tag_class = 0b01000000 - elif exp_class == "context-specific": - tag_class = 0b10000000 - else: - assert exp_class == "private" - tag_class = 0b11000000 - tag_mask = 0b11000000 - - s0 = str_idx_as_int(string, 0) - - if (s0 & tag_mask) != tag_class: - raise UnexpectedDER( - "wanted class {0}, got 0x{1:02x} tag".format(exp_class, s0) - ) - if s0 & 0b00100000 != 0: - raise UnexpectedDER( - "wanted type primitive, got 0x{0:02x} tag".format(s0) - ) - - tag = s0 & 0x1F - length, llen = read_length(string[1:]) - body = string[1 + llen : 1 + llen + length] - rest = string[1 + llen + length :] - return tag, body, rest - - -def remove_sequence(string): - if not string: - raise UnexpectedDER("Empty string does not encode a sequence") - if string[:1] != b"\x30": - n = str_idx_as_int(string, 0) - raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n) - length, lengthlength = read_length(string[1:]) - if length > len(string) - 1 - lengthlength: - raise UnexpectedDER("Length longer than the provided buffer") - endseq = 1 + lengthlength + length - return string[1 + lengthlength : endseq], string[endseq:] - - -def remove_octet_string(string): - if string[:1] != b"\x04": - n = str_idx_as_int(string, 0) - raise UnexpectedDER("wanted type 'octetstring' (0x04), got 0x%02x" % n) - length, llen = read_length(string[1:]) - body = string[1 + llen : 1 + llen + length] - rest = string[1 + llen + length :] - return body, rest - - -def remove_object(string): - if not string: - raise UnexpectedDER( - "Empty string does not encode an object identifier" - ) - if string[:1] != b"\x06": - n = str_idx_as_int(string, 0) - raise UnexpectedDER("wanted type 'object' (0x06), got 0x%02x" % n) - length, lengthlength = read_length(string[1:]) - body = string[1 + lengthlength : 1 + lengthlength + length] - rest = string[1 + lengthlength + length :] - if not body: - raise UnexpectedDER("Empty object identifier") - if len(body) != length: - raise UnexpectedDER( - "Length of object identifier longer than the provided buffer" - ) - numbers = [] - while body: - n, ll = read_number(body) - numbers.append(n) - body = body[ll:] - n0 = numbers.pop(0) - if n0 < 80: - first = n0 // 40 - else: - first = 2 - second = n0 - (40 * first) - numbers.insert(0, first) - numbers.insert(1, second) - return tuple(numbers), rest - - -def remove_integer(string): - if not string: - raise UnexpectedDER( - "Empty string is an invalid encoding of an integer" - ) - if string[:1] != b"\x02": - n = str_idx_as_int(string, 0) - raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n) - length, llen = read_length(string[1:]) - if length > len(string) - 1 - llen: - raise UnexpectedDER("Length longer than provided buffer") - if length == 0: - raise UnexpectedDER("0-byte long encoding of integer") - numberbytes = string[1 + llen : 1 + llen + length] - rest = string[1 + llen + length :] - msb = str_idx_as_int(numberbytes, 0) - if not msb < 0x80: - raise UnexpectedDER("Negative integers are not supported") - # check if the encoding is the minimal one (DER requirement) - if length > 1 and not msb: - # leading zero byte is allowed if the integer would have been - # 
considered a negative number otherwise
-        smsb = str_idx_as_int(numberbytes, 1)
-        if smsb < 0x80:
-            raise UnexpectedDER(
-                "Invalid encoding of integer, unnecessary "
-                "zero padding bytes"
-            )
-    return int(binascii.hexlify(numberbytes), 16), rest
-
-
-def read_number(string):
-    number = 0
-    llen = 0
-    if str_idx_as_int(string, 0) == 0x80:
-        raise UnexpectedDER("Non minimal encoding of OID subidentifier")
-    # base-128 big endian, with most significant bit set in all but the last
-    # byte
-    while True:
-        if llen >= len(string):
-            raise UnexpectedDER("ran out of length bytes")
-        number = number << 7
-        d = str_idx_as_int(string, llen)
-        number += d & 0x7F
-        llen += 1
-        if not d & 0x80:
-            break
-    return number, llen
-
-
-def encode_length(l):
-    assert l >= 0
-    if l < 0x80:
-        return int2byte(l)
-    s = ("%x" % l).encode()
-    if len(s) % 2:
-        s = b"0" + s
-    s = binascii.unhexlify(s)
-    llen = len(s)
-    return int2byte(0x80 | llen) + s
-
-
-def read_length(string):
-    if not string:
-        raise UnexpectedDER("Empty string can't encode valid length value")
-    num = str_idx_as_int(string, 0)
-    if not (num & 0x80):
-        # short form
-        return (num & 0x7F), 1
-    # else long-form: b0&0x7f is number of additional base256 length bytes,
-    # big-endian
-    llen = num & 0x7F
-    if not llen:
-        raise UnexpectedDER("Invalid length encoding, length of length is 0")
-    if llen > len(string) - 1:
-        raise UnexpectedDER("Length of length longer than provided buffer")
-    # verify that the encoding is minimal possible (DER requirement)
-    msb = str_idx_as_int(string, 1)
-    if not msb or llen == 1 and msb < 0x80:
-        raise UnexpectedDER("Not minimal encoding of length")
-    return int(binascii.hexlify(string[1 : 1 + llen]), 16), 1 + llen
-
-
-def remove_bitstring(string, expect_unused=_sentry):
-    """
-    Remove a BIT STRING object from `string` following :term:`DER`.
-
-    The `expect_unused` parameter can be used to specify whether the bit
-    string should have the count of unused bits decoded or not. If it's an
-    integer, any read BIT STRING that has a number of unused bits different
-    from the specified value will cause an UnexpectedDER exception to be
-    raised (this is especially useful when decoding BIT STRINGs that have a
-    DER-encoded object in them; DER encoding is byte oriented, so the unused
-    bits will always equal 0).
-
-    If `expect_unused` is specified as None, the first element returned
-    will be a tuple, with the first value being the extracted bit string
-    while the second value will be the decoded number of unused bits.
-
-    If `expect_unused` is unspecified, the decoding of the byte with the
-    number of unused bits will not be attempted and the bit string will be
-    returned as-is; the caller will be required to decode it and verify its
-    correctness.
-
-    A future version of python-ecdsa will require the `expect_unused`
-    parameter to be specified.
-
-    :param string: string of bytes to extract the BIT STRING from
-    :type string: bytes like object
-    :param expect_unused: number of bits that should be unused in the BIT
-        STRING, or None, to return it to caller
-    :type expect_unused: int or None
-
-    :raises UnexpectedDER: when the encoding does not follow DER.
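# A sketch of the two `expect_unused` modes described above, paired with
# encode_bitstring() from earlier in this module.
from ecdsa import der

encoded = der.encode_bitstring(b"\xf0", 4)
# Ask for the unused-bit count back:
(bits, unused), rest = der.remove_bitstring(encoded, None)
assert (bits, unused, rest) == (b"\xf0", 4, b"")
# Demand exactly four unused bits:
bits, rest = der.remove_bitstring(encoded, 4)
assert bits == b"\xf0"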
- - :return: a tuple with first element being the extracted bit string and - the second being the remaining bytes in the string (if any); if the - `expect_unused` is specified as None, the first element of the returned - tuple will be a tuple itself, with first element being the bit string - as bytes and the second element being the number of unused bits at the - end of the byte array as an integer - :rtype: tuple - """ - if not string: - raise UnexpectedDER("Empty string does not encode a bitstring") - if expect_unused is _sentry: - warnings.warn( - "Legacy call convention used, expect_unused= needs to be" - " specified", - DeprecationWarning, - ) - num = str_idx_as_int(string, 0) - if string[:1] != b"\x03": - raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num) - length, llen = read_length(string[1:]) - if not length: - raise UnexpectedDER("Invalid length of bit string, can't be 0") - body = string[1 + llen : 1 + llen + length] - rest = string[1 + llen + length :] - if expect_unused is not _sentry: - unused = str_idx_as_int(body, 0) - if not 0 <= unused <= 7: - raise UnexpectedDER("Invalid encoding of unused bits") - if expect_unused is not None and expect_unused != unused: - raise UnexpectedDER("Unexpected number of unused bits") - body = body[1:] - if unused: - if not body: - raise UnexpectedDER("Invalid encoding of empty bit string") - last = str_idx_as_int(body, -1) - # verify that all the unused bits are set to zero (DER requirement) - if last & (2**unused - 1): - raise UnexpectedDER("Non zero padding bits in bit string") - if expect_unused is None: - body = (body, unused) - return body, rest - - -# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING) - - -# signatures: (from RFC3279) -# ansi-X9-62 OBJECT IDENTIFIER ::= { -# iso(1) member-body(2) us(840) 10045 } -# -# id-ecSigType OBJECT IDENTIFIER ::= { -# ansi-X9-62 signatures(4) } -# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= { -# id-ecSigType 1 } -# so 1,2,840,10045,4,1 -# so 0x42, .. .. - -# Ecdsa-Sig-Value ::= SEQUENCE { -# r INTEGER, -# s INTEGER } - -# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 } -# -# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 } - -# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021) -# secp224r1 OBJECT IDENTIFIER ::= { -# iso(1) identified-organization(3) certicom(132) curve(0) 33 } -# and the secp384r1 is (t=06,l=05,v=2b81040022) -# secp384r1 OBJECT IDENTIFIER ::= { -# iso(1) identified-organization(3) certicom(132) curve(0) 34 } - - -def unpem(pem): - if isinstance(pem, text_type): # pragma: no branch - pem = pem.encode() - - d = b"".join( - [ - l.strip() - for l in pem.split(b"\n") - if l and not l.startswith(b"-----") - ] - ) - return base64.b64decode(d) - - -def topem(der, name): - b64 = base64.b64encode(compat26_str(der)) - lines = [("-----BEGIN %s-----\n" % name).encode()] - lines.extend( - [b64[start : start + 76] + b"\n" for start in range(0, len(b64), 76)] - ) - lines.append(("-----END %s-----\n" % name).encode()) - return b"".join(lines) diff --git a/venv/lib/python3.12/site-packages/ecdsa/ecdh.py b/venv/lib/python3.12/site-packages/ecdsa/ecdh.py deleted file mode 100644 index 7f697d9a..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/ecdh.py +++ /dev/null @@ -1,336 +0,0 @@ -""" -Class for performing Elliptic-curve Diffie-Hellman (ECDH) operations. 
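# A round-trip sketch for topem()/unpem() from the end of der.py above; the
# payload is the DER-encoded OID of prime256v1, but any bytes would do.
from ecdsa import der

oid_prime256v1 = b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07"
pem = der.topem(oid_prime256v1, "EC PARAMETERS")
assert pem.startswith(b"-----BEGIN EC PARAMETERS-----")
assert der.unpem(pem) == oid_prime256v1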
-""" - -from .util import number_to_string -from .ellipticcurve import INFINITY -from .keys import SigningKey, VerifyingKey - - -__all__ = [ - "ECDH", - "NoKeyError", - "NoCurveError", - "InvalidCurveError", - "InvalidSharedSecretError", -] - - -class NoKeyError(Exception): - """ECDH. Key not found but it is needed for operation.""" - - pass - - -class NoCurveError(Exception): - """ECDH. Curve not set but it is needed for operation.""" - - pass - - -class InvalidCurveError(Exception): - """ - ECDH. Raised in case the public and private keys use different curves. - """ - - pass - - -class InvalidSharedSecretError(Exception): - """ECDH. Raised in case the shared secret we obtained is an INFINITY.""" - - pass - - -class ECDH(object): - """ - Elliptic-curve Diffie-Hellman (ECDH). A key agreement protocol. - - Allows two parties, each having an elliptic-curve public-private key - pair, to establish a shared secret over an insecure channel - """ - - def __init__(self, curve=None, private_key=None, public_key=None): - """ - ECDH init. - - Call can be initialised without parameters, then the first operation - (loading either key) will set the used curve. - All parameters must be ultimately set before shared secret - calculation will be allowed. - - :param curve: curve for operations - :type curve: Curve - :param private_key: `my` private key for ECDH - :type private_key: SigningKey - :param public_key: `their` public key for ECDH - :type public_key: VerifyingKey - """ - self.curve = curve - self.private_key = None - self.public_key = None - if private_key: - self.load_private_key(private_key) - if public_key: - self.load_received_public_key(public_key) - - def _get_shared_secret(self, remote_public_key): - if not self.private_key: - raise NoKeyError( - "Private key needs to be set to create shared secret" - ) - if not self.public_key: - raise NoKeyError( - "Public key needs to be set to create shared secret" - ) - if not ( - self.private_key.curve == self.curve == remote_public_key.curve - ): - raise InvalidCurveError( - "Curves for public key and private key is not equal." - ) - - # shared secret = PUBKEYtheirs * PRIVATEKEYours - result = ( - remote_public_key.pubkey.point - * self.private_key.privkey.secret_multiplier - ) - if result == INFINITY: - raise InvalidSharedSecretError("Invalid shared secret (INFINITY).") - - return result.x() - - def set_curve(self, key_curve): - """ - Set the working curve for ecdh operations. - - :param key_curve: curve from `curves` module - :type key_curve: Curve - """ - self.curve = key_curve - - def generate_private_key(self): - """ - Generate local private key for ecdh operation with curve that was set. - - :raises NoCurveError: Curve must be set before key generation. - - :return: public (verifying) key from this private key. - :rtype: VerifyingKey - """ - if not self.curve: - raise NoCurveError("Curve must be set prior to key generation.") - return self.load_private_key(SigningKey.generate(curve=self.curve)) - - def load_private_key(self, private_key): - """ - Load private key from SigningKey (keys.py) object. - - Needs to have the same curve as was set with set_curve method. - If curve is not set - it sets from this SigningKey - - :param private_key: Initialised SigningKey class - :type private_key: SigningKey - - :raises InvalidCurveError: private_key curve not the same as self.curve - - :return: public (verifying) key from this private key. 
- :rtype: VerifyingKey - """ - if not self.curve: - self.curve = private_key.curve - if self.curve != private_key.curve: - raise InvalidCurveError("Curve mismatch.") - self.private_key = private_key - return self.private_key.get_verifying_key() - - def load_private_key_bytes(self, private_key): - """ - Load private key from byte string. - - Uses current curve and checks if the provided key matches - the curve of ECDH key agreement. - Key loads via from_string method of SigningKey class - - :param private_key: private key in bytes string format - :type private_key: :term:`bytes-like object` - - :raises NoCurveError: Curve must be set before loading. - - :return: public (verifying) key from this private key. - :rtype: VerifyingKey - """ - if not self.curve: - raise NoCurveError("Curve must be set prior to key load.") - return self.load_private_key( - SigningKey.from_string(private_key, curve=self.curve) - ) - - def load_private_key_der(self, private_key_der): - """ - Load private key from DER byte string. - - Compares the curve of the DER-encoded key with the ECDH set curve, - uses the former if unset. - - Note, the only DER format supported is the RFC5915 - Look at keys.py:SigningKey.from_der() - - :param private_key_der: string with the DER encoding of private ECDSA - key - :type private_key_der: string - - :raises InvalidCurveError: private_key curve not the same as self.curve - - :return: public (verifying) key from this private key. - :rtype: VerifyingKey - """ - return self.load_private_key(SigningKey.from_der(private_key_der)) - - def load_private_key_pem(self, private_key_pem): - """ - Load private key from PEM string. - - Compares the curve of the DER-encoded key with the ECDH set curve, - uses the former if unset. - - Note, the only PEM format supported is the RFC5915 - Look at keys.py:SigningKey.from_pem() - it needs to have `EC PRIVATE KEY` section - - :param private_key_pem: string with PEM-encoded private ECDSA key - :type private_key_pem: string - - :raises InvalidCurveError: private_key curve not the same as self.curve - - :return: public (verifying) key from this private key. - :rtype: VerifyingKey - """ - return self.load_private_key(SigningKey.from_pem(private_key_pem)) - - def get_public_key(self): - """ - Provides a public key that matches the local private key. - - Needs to be sent to the remote party. - - :return: public (verifying) key from local private key. - :rtype: VerifyingKey - """ - return self.private_key.get_verifying_key() - - def load_received_public_key(self, public_key): - """ - Load public key from VerifyingKey (keys.py) object. - - Needs to have the same curve as set as current for ecdh operation. - If curve is not set - it sets it from VerifyingKey. - - :param public_key: Initialised VerifyingKey class - :type public_key: VerifyingKey - - :raises InvalidCurveError: public_key curve not the same as self.curve - """ - if not self.curve: - self.curve = public_key.curve - if self.curve != public_key.curve: - raise InvalidCurveError("Curve mismatch.") - self.public_key = public_key - - def load_received_public_key_bytes( - self, public_key_str, valid_encodings=None - ): - """ - Load public key from byte string. - - Uses current curve and checks if key length corresponds to - the current curve. 
- Key loads via from_string method of VerifyingKey class - - :param public_key_str: public key in bytes string format - :type public_key_str: :term:`bytes-like object` - :param valid_encodings: list of acceptable point encoding formats, - supported ones are: :term:`uncompressed`, :term:`compressed`, - :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` - name). All formats by default (specified with ``None``). - :type valid_encodings: :term:`set-like object` - """ - return self.load_received_public_key( - VerifyingKey.from_string( - public_key_str, self.curve, valid_encodings - ) - ) - - def load_received_public_key_der(self, public_key_der): - """ - Load public key from DER byte string. - - Compares the curve of the DER-encoded key with the ECDH set curve, - uses the former if unset. - - Note, the only DER format supported is the RFC5912 - Look at keys.py:VerifyingKey.from_der() - - :param public_key_der: string with the DER encoding of public ECDSA key - :type public_key_der: string - - :raises InvalidCurveError: public_key curve not the same as self.curve - """ - return self.load_received_public_key( - VerifyingKey.from_der(public_key_der) - ) - - def load_received_public_key_pem(self, public_key_pem): - """ - Load public key from PEM string. - - Compares the curve of the PEM-encoded key with the ECDH set curve, - uses the former if unset. - - Note, the only PEM format supported is the RFC5912 - Look at keys.py:VerifyingKey.from_pem() - - :param public_key_pem: string with PEM-encoded public ECDSA key - :type public_key_pem: string - - :raises InvalidCurveError: public_key curve not the same as self.curve - """ - return self.load_received_public_key( - VerifyingKey.from_pem(public_key_pem) - ) - - def generate_sharedsecret_bytes(self): - """ - Generate shared secret from local private key and remote public key. - - The objects needs to have both private key and received public key - before generation is allowed. - - :raises InvalidCurveError: public_key curve not the same as self.curve - :raises NoKeyError: public_key or private_key is not set - - :return: shared secret - :rtype: bytes - """ - return number_to_string( - self.generate_sharedsecret(), self.private_key.curve.curve.p() - ) - - def generate_sharedsecret(self): - """ - Generate shared secret from local private key and remote public key. - - The objects needs to have both private key and received public key - before generation is allowed. - - It's the same for local and remote party, - shared secret(local private key, remote public key) == - shared secret(local public key, remote private key) - - :raises InvalidCurveError: public_key curve not the same as self.curve - :raises NoKeyError: public_key or private_key is not set - - :return: shared secret - :rtype: int - """ - return self._get_shared_secret(self.public_key) diff --git a/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py deleted file mode 100644 index f7109659..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/ecdsa.py +++ /dev/null @@ -1,1094 +0,0 @@ -#! /usr/bin/env python - -""" -Low level implementation of Elliptic-Curve Digital Signatures. - -.. note :: - You're most likely looking for the :py:class:`~ecdsa.keys` module. - This is a low-level implementation of the ECDSA that operates on - integers, not byte strings. - -NOTE: This a low level implementation of ECDSA, for normal applications -you should be looking at the keys.py module. 
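# Before moving on to ecdsa.py: a hedged end-to-end sketch of the ECDH class
# from ecdh.py above. NIST256p is an arbitrary choice of curve.
from ecdsa import ECDH, NIST256p

alice = ECDH(curve=NIST256p)
alice_pub = alice.generate_private_key()
bob = ECDH(curve=NIST256p)
bob_pub = bob.generate_private_key()
alice.load_received_public_key(bob_pub)
bob.load_received_public_key(alice_pub)
# Both sides derive identical bytes without exchanging private keys.
assert alice.generate_sharedsecret_bytes() == bob.generate_sharedsecret_bytes()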
- -Classes and methods for elliptic-curve signatures: -private keys, public keys, signatures, -and definitions of prime-modulus curves. - -Example: - -.. code-block:: python - - # (In real-life applications, you would probably want to - # protect against defects in SystemRandom.) - from random import SystemRandom - randrange = SystemRandom().randrange - - # Generate a public/private key pair using the NIST Curve P-192: - - g = generator_192 - n = g.order() - secret = randrange( 1, n ) - pubkey = Public_key( g, g * secret ) - privkey = Private_key( pubkey, secret ) - - # Signing a hash value: - - hash = randrange( 1, n ) - signature = privkey.sign( hash, randrange( 1, n ) ) - - # Verifying a signature for a hash value: - - if pubkey.verifies( hash, signature ): - print("Demo verification succeeded.") - else: - print("*** Demo verification failed.") - - # Verification fails if the hash value is modified: - - if pubkey.verifies( hash-1, signature ): - print("**** Demo verification failed to reject tampered hash.") - else: - print("Demo verification correctly rejected tampered hash.") - -Revision history: - 2005.12.31 - Initial version. - - 2008.11.25 - Substantial revisions introducing new classes. - - 2009.05.16 - Warn against using random.randrange in real applications. - - 2009.05.17 - Use random.SystemRandom by default. - -Originally written in 2005 by Peter Pearson and placed in the public domain, -modified as part of the python-ecdsa package. -""" - -import warnings -from six import int2byte -from . import ellipticcurve -from . import numbertheory -from .util import bit_length -from ._compat import remove_whitespace - - -class RSZeroError(RuntimeError): - pass - - -class InvalidPointError(RuntimeError): - pass - - -class Signature(object): - """ - ECDSA signature. - - :ivar int r: the ``r`` element of the ECDSA signature - :ivar int s: the ``s`` element of the ECDSA signature - """ - - def __init__(self, r, s): - self.r = r - self.s = s - - def recover_public_keys(self, hash, generator): - """ - Returns two public keys for which the signature is valid - - :param int hash: signed hash - :param AbstractPoint generator: is the generator used in creation - of the signature - :rtype: tuple(Public_key, Public_key) - :return: a pair of public keys that can validate the signature - """ - curve = generator.curve() - n = generator.order() - r = self.r - s = self.s - e = hash - x = r - - # Compute the curve point with x as x-coordinate - alpha = ( - pow(x, 3, curve.p()) + (curve.a() * x) + curve.b() - ) % curve.p() - beta = numbertheory.square_root_mod_prime(alpha, curve.p()) - y = beta if beta % 2 == 0 else curve.p() - beta - - # Compute the public key - R1 = ellipticcurve.PointJacobi(curve, x, y, 1, n) - Q1 = numbertheory.inverse_mod(r, n) * (s * R1 + (-e % n) * generator) - Pk1 = Public_key(generator, Q1) - - # And the second solution - R2 = ellipticcurve.PointJacobi(curve, x, -y, 1, n) - Q2 = numbertheory.inverse_mod(r, n) * (s * R2 + (-e % n) * generator) - Pk2 = Public_key(generator, Q2) - - return [Pk1, Pk2] - - -class Public_key(object): - """Public key for ECDSA.""" - - def __init__(self, generator, point, verify=True): - """Low level ECDSA public key object. 
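# A sketch of Signature.recover_public_keys() from above, following the same
# low-level conventions as the module docstring's demo (P-256 assumed).
from random import SystemRandom
from ecdsa.ecdsa import Private_key, Public_key, generator_256

randrange = SystemRandom().randrange
g = generator_256
n = g.order()
secret = randrange(1, n)
pubkey = Public_key(g, g * secret)
privkey = Private_key(pubkey, secret)
digest = randrange(1, n)
signature = privkey.sign(digest, randrange(1, n))
# Recovery yields two candidates; the signer's real key is one of them.
recovered = signature.recover_public_keys(digest, g)
assert any(pk.point == pubkey.point for pk in recovered)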
- - :param generator: the Point that generates the group (the base point) - :param point: the Point that defines the public key - :param bool verify: if True check if point is valid point on curve - - :raises InvalidPointError: if the point parameters are invalid or - point does not lay on the curve - """ - - self.curve = generator.curve() - self.generator = generator - self.point = point - n = generator.order() - p = self.curve.p() - if not (0 <= point.x() < p) or not (0 <= point.y() < p): - raise InvalidPointError( - "The public point has x or y out of range." - ) - if verify and not self.curve.contains_point(point.x(), point.y()): - raise InvalidPointError("Point does not lay on the curve") - if not n: - raise InvalidPointError("Generator point must have order.") - # for curve parameters with base point with cofactor 1, all points - # that are on the curve are scalar multiples of the base point, so - # verifying that is not necessary. See Section 3.2.2.1 of SEC 1 v2 - if ( - verify - and self.curve.cofactor() != 1 - and not n * point == ellipticcurve.INFINITY - ): - raise InvalidPointError("Generator point order is bad.") - - def __eq__(self, other): - """Return True if the keys are identical, False otherwise. - - Note: for comparison, only placement on the same curve and point - equality is considered, use of the same generator point is not - considered. - """ - if isinstance(other, Public_key): - return self.curve == other.curve and self.point == other.point - return NotImplemented - - def __ne__(self, other): - """Return False if the keys are identical, True otherwise.""" - return not self == other - - def verifies(self, hash, signature): - """Verify that signature is a valid signature of hash. - Return True if the signature is valid. - """ - - # From X9.62 J.3.1. - - G = self.generator - n = G.order() - r = signature.r - s = signature.s - if r < 1 or r > n - 1: - return False - if s < 1 or s > n - 1: - return False - c = numbertheory.inverse_mod(s, n) - u1 = (hash * c) % n - u2 = (r * c) % n - if hasattr(G, "mul_add"): - xy = G.mul_add(u1, self.point, u2) - else: - xy = u1 * G + u2 * self.point - v = xy.x() % n - return v == r - - -class Private_key(object): - """Private key for ECDSA.""" - - def __init__(self, public_key, secret_multiplier): - """public_key is of class Public_key; - secret_multiplier is a large integer. - """ - - self.public_key = public_key - self.secret_multiplier = secret_multiplier - - def __eq__(self, other): - """Return True if the points are identical, False otherwise.""" - if isinstance(other, Private_key): - return ( - self.public_key == other.public_key - and self.secret_multiplier == other.secret_multiplier - ) - return NotImplemented - - def __ne__(self, other): - """Return False if the points are identical, True otherwise.""" - return not self == other - - def sign(self, hash, random_k): - """Return a signature for the provided hash, using the provided - random nonce. It is absolutely vital that random_k be an unpredictable - number in the range [1, self.public_key.point.order()-1]. If - an attacker can guess random_k, he can compute our private key from a - single signature. Also, if an attacker knows a few high-order - bits (or a few low-order bits) of random_k, he can compute our private - key from many signatures. The generation of nonces with adequate - cryptographic strength is very difficult and far beyond the scope - of this comment. - - May raise RuntimeError, in which case retrying with a new - random value k is in order. 
- """ - - G = self.public_key.generator - n = G.order() - k = random_k % n - # Fix the bit-length of the random nonce, - # so that it doesn't leak via timing. - # This does not change that ks = k mod n - ks = k + n - kt = ks + n - if bit_length(ks) == bit_length(n): - p1 = kt * G - else: - p1 = ks * G - r = p1.x() % n - if r == 0: - raise RSZeroError("amazingly unlucky random number r") - s = ( - numbertheory.inverse_mod(k, n) - * (hash + (self.secret_multiplier * r) % n) - ) % n - if s == 0: - raise RSZeroError("amazingly unlucky random number s") - return Signature(r, s) - - -def int_to_string(x): # pragma: no cover - """Convert integer x into a string of bytes, as per X9.62.""" - # deprecated in 0.19 - warnings.warn( - "Function is unused in library code. If you use this code, " - "change to util.number_to_string.", - DeprecationWarning, - ) - assert x >= 0 - if x == 0: - return b"\0" - result = [] - while x: - ordinal = x & 0xFF - result.append(int2byte(ordinal)) - x >>= 8 - - result.reverse() - return b"".join(result) - - -def string_to_int(s): # pragma: no cover - """Convert a string of bytes into an integer, as per X9.62.""" - # deprecated in 0.19 - warnings.warn( - "Function is unused in library code. If you use this code, " - "change to util.string_to_number.", - DeprecationWarning, - ) - result = 0 - for c in s: - if not isinstance(c, int): - c = ord(c) - result = 256 * result + c - return result - - -def digest_integer(m): # pragma: no cover - """Convert an integer into a string of bytes, compute - its SHA-1 hash, and convert the result to an integer.""" - # deprecated in 0.19 - warnings.warn( - "Function is unused in library code. If you use this code, " - "change to a one-liner with util.number_to_string and " - "util.string_to_number methods.", - DeprecationWarning, - ) - # - # I don't expect this function to be used much. I wrote - # it in order to be able to duplicate the examples - # in ECDSAVS. - # - from hashlib import sha1 - - return string_to_int(sha1(int_to_string(m)).digest()) - - -def point_is_valid(generator, x, y): - """Is (x,y) a valid public key based on the specified generator?""" - - # These are the tests specified in X9.62. 
- - n = generator.order() - curve = generator.curve() - p = curve.p() - if not (0 <= x < p) or not (0 <= y < p): - return False - if not curve.contains_point(x, y): - return False - if ( - curve.cofactor() != 1 - and not n * ellipticcurve.PointJacobi(curve, x, y, 1) - == ellipticcurve.INFINITY - ): - return False - return True - - -# secp112r1 curve -_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) -# s = 00F50B02 8E4D696E 67687561 51752904 72783FB1 -_a = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD2088"), 16) -_b = int(remove_whitespace("659E F8BA0439 16EEDE89 11702B22"), 16) -_Gx = int(remove_whitespace("09487239 995A5EE7 6B55F9C2 F098"), 16) -_Gy = int(remove_whitespace("A89C E5AF8724 C0A23E0E 0FF77500"), 16) -_r = int(remove_whitespace("DB7C 2ABF62E3 5E7628DF AC6561C5"), 16) -_h = 1 -curve_112r1 = ellipticcurve.CurveFp(_p, _a, _b, _h) -generator_112r1 = ellipticcurve.PointJacobi( - curve_112r1, _Gx, _Gy, 1, _r, generator=True -) - - -# secp112r2 curve -_p = int(remove_whitespace("DB7C 2ABF62E3 5E668076 BEAD208B"), 16) -# s = 022757A1 114D69E 67687561 51755316 C05E0BD4 -_a = int(remove_whitespace("6127 C24C05F3 8A0AAAF6 5C0EF02C"), 16) -_b = int(remove_whitespace("51DE F1815DB5 ED74FCC3 4C85D709"), 16) -_Gx = int(remove_whitespace("4BA30AB5 E892B4E1 649DD092 8643"), 16) -_Gy = int(remove_whitespace("ADCD 46F5882E 3747DEF3 6E956E97"), 16) -_r = int(remove_whitespace("36DF 0AAFD8B8 D7597CA1 0520D04B"), 16) -_h = 4 -curve_112r2 = ellipticcurve.CurveFp(_p, _a, _b, _h) -generator_112r2 = ellipticcurve.PointJacobi( - curve_112r2, _Gx, _Gy, 1, _r, generator=True -) - - -# secp128r1 curve -_p = int(remove_whitespace("FFFFFFFD FFFFFFFF FFFFFFFF FFFFFFFF"), 16) -# S = 000E0D4D 69E6768 75615175 0CC03A44 73D03679 -# a and b are mod p, so a is equal to p-3, or simply -3 -# _a = -3 -_b = int(remove_whitespace("E87579C1 1079F43D D824993C 2CEE5ED3"), 16) -_Gx = int(remove_whitespace("161FF752 8B899B2D 0C28607C A52C5B86"), 16) -_Gy = int(remove_whitespace("CF5AC839 5BAFEB13 C02DA292 DDED7A83"), 16) -_r = int(remove_whitespace("FFFFFFFE 00000000 75A30D1B 9038A115"), 16) -_h = 1 -curve_128r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) -generator_128r1 = ellipticcurve.PointJacobi( - curve_128r1, _Gx, _Gy, 1, _r, generator=True -) - - -# secp160r1 -_p = int(remove_whitespace("FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF 7FFFFFFF"), 16) -# S = 1053CDE4 2C14D696 E6768756 1517533B F3F83345 -# a and b are mod p, so a is equal to p-3, or simply -3 -# _a = -3 -_b = int(remove_whitespace("1C97BEFC 54BD7A8B 65ACF89F 81D4D4AD C565FA45"), 16) -_Gx = int( - remove_whitespace("4A96B568 8EF57328 46646989 68C38BB9 13CBFC82"), - 16, -) -_Gy = int( - remove_whitespace("23A62855 3168947D 59DCC912 04235137 7AC5FB32"), - 16, -) -_r = int( - remove_whitespace("01 00000000 00000000 0001F4C8 F927AED3 CA752257"), - 16, -) -_h = 1 -curve_160r1 = ellipticcurve.CurveFp(_p, -3, _b, _h) -generator_160r1 = ellipticcurve.PointJacobi( - curve_160r1, _Gx, _Gy, 1, _r, generator=True -) - - -# NIST Curve P-192: -_p = 6277101735386680763835789423207666416083908700390324961279 -_r = 6277101735386680763835789423176059013767194773182842284081 -# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L -# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L -_b = int( - remove_whitespace( - """ - 64210519 E59C80E7 0FA7E9AB 72243049 FEB8DEEC C146B9B1""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - 188DA80E B03090F6 7CBF20EB 43A18800 F4FF0AFD 82FF1012""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 07192B95 FFC8DA78 
631011ED 6B24CDD5 73F977A1 1E794811""" - ), - 16, -) - -curve_192 = ellipticcurve.CurveFp(_p, -3, _b, 1) -generator_192 = ellipticcurve.PointJacobi( - curve_192, _Gx, _Gy, 1, _r, generator=True -) - - -# NIST Curve P-224: -_p = int( - remove_whitespace( - """ - 2695994666715063979466701508701963067355791626002630814351 - 0066298881""" - ) -) -_r = int( - remove_whitespace( - """ - 2695994666715063979466701508701962594045780771442439172168 - 2722368061""" - ) -) -# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L -# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL -_b = int( - remove_whitespace( - """ - B4050A85 0C04B3AB F5413256 5044B0B7 D7BFD8BA 270B3943 - 2355FFB4""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - B70E0CBD 6BB4BF7F 321390B9 4A03C1D3 56C21122 343280D6 - 115C1D21""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - BD376388 B5F723FB 4C22DFE6 CD4375A0 5A074764 44D58199 - 85007E34""" - ), - 16, -) - -curve_224 = ellipticcurve.CurveFp(_p, -3, _b, 1) -generator_224 = ellipticcurve.PointJacobi( - curve_224, _Gx, _Gy, 1, _r, generator=True -) - -# NIST Curve P-256: -_p = int( - remove_whitespace( - """ - 1157920892103562487626974469494075735300861434152903141955 - 33631308867097853951""" - ) -) -_r = int( - remove_whitespace( - """ - 115792089210356248762697446949407573529996955224135760342 - 422259061068512044369""" - ) -) -# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L -# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL -_b = int( - remove_whitespace( - """ - 5AC635D8 AA3A93E7 B3EBBD55 769886BC 651D06B0 CC53B0F6 - 3BCE3C3E 27D2604B""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - 6B17D1F2 E12C4247 F8BCE6E5 63A440F2 77037D81 2DEB33A0 - F4A13945 D898C296""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 4FE342E2 FE1A7F9B 8EE7EB4A 7C0F9E16 2BCE3357 6B315ECE - CBB64068 37BF51F5""" - ), - 16, -) - -curve_256 = ellipticcurve.CurveFp(_p, -3, _b, 1) -generator_256 = ellipticcurve.PointJacobi( - curve_256, _Gx, _Gy, 1, _r, generator=True -) - -# NIST Curve P-384: -_p = int( - remove_whitespace( - """ - 3940200619639447921227904010014361380507973927046544666794 - 8293404245721771496870329047266088258938001861606973112319""" - ) -) -_r = int( - remove_whitespace( - """ - 3940200619639447921227904010014361380507973927046544666794 - 6905279627659399113263569398956308152294913554433653942643""" - ) -) -# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L -# c = int(remove_whitespace( -# """ -# 79d1e655 f868f02f ff48dcde e14151dd b80643c1 406d0ca1 -# 0dfe6fc5 2009540a 495e8042 ea5f744f 6e184667 cc722483""" -# ), 16) -_b = int( - remove_whitespace( - """ - B3312FA7 E23EE7E4 988E056B E3F82D19 181D9C6E FE814112 - 0314088F 5013875A C656398D 8A2ED19D 2A85C8ED D3EC2AEF""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - AA87CA22 BE8B0537 8EB1C71E F320AD74 6E1D3B62 8BA79B98 - 59F741E0 82542A38 5502F25D BF55296C 3A545E38 72760AB7""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 3617DE4A 96262C6F 5D9E98BF 9292DC29 F8F41DBD 289A147C - E9DA3113 B5F0B8C0 0A60B1CE 1D7E819D 7A431D7C 90EA0E5F""" - ), - 16, -) - -curve_384 = ellipticcurve.CurveFp(_p, -3, _b, 1) -generator_384 = ellipticcurve.PointJacobi( - curve_384, _Gx, _Gy, 1, _r, generator=True -) - -# NIST Curve P-521: -_p = int( - "686479766013060971498190079908139321726943530014330540939" - "446345918554318339765605212255964066145455497729631139148" - "0858037121987999716643812574028291115057151" -) -_r = int( - "686479766013060971498190079908139321726943530014330540939" 
- "446345918554318339765539424505774633321719753296399637136" - "3321113864768612440380340372808892707005449" -) -# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL -# c = int(remove_whitespace( -# """ -# 0b4 8bfa5f42 0a349495 39d2bdfc 264eeeeb 077688e4 -# 4fbf0ad8 f6d0edb3 7bd6b533 28100051 8e19f1b9 ffbe0fe9 -# ed8a3c22 00b8f875 e523868c 70c1e5bf 55bad637""" -# ), 16) -_b = int( - remove_whitespace( - """ - 051 953EB961 8E1C9A1F 929A21A0 B68540EE A2DA725B - 99B315F3 B8B48991 8EF109E1 56193951 EC7E937B 1652C0BD - 3BB1BF07 3573DF88 3D2C34F1 EF451FD4 6B503F00""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - C6 858E06B7 0404E9CD 9E3ECB66 2395B442 9C648139 - 053FB521 F828AF60 6B4D3DBA A14B5E77 EFE75928 FE1DC127 - A2FFA8DE 3348B3C1 856A429B F97E7E31 C2E5BD66""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 118 39296A78 9A3BC004 5C8A5FB4 2C7D1BD9 98F54449 - 579B4468 17AFBD17 273E662C 97EE7299 5EF42640 C550B901 - 3FAD0761 353C7086 A272C240 88BE9476 9FD16650""" - ), - 16, -) - -curve_521 = ellipticcurve.CurveFp(_p, -3, _b, 1) -generator_521 = ellipticcurve.PointJacobi( - curve_521, _Gx, _Gy, 1, _r, generator=True -) - -# Certicom secp256-k1 -_a = 0x0000000000000000000000000000000000000000000000000000000000000000 -_b = 0x0000000000000000000000000000000000000000000000000000000000000007 -_p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F -_Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798 -_Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 -_r = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 - -curve_secp256k1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_secp256k1 = ellipticcurve.PointJacobi( - curve_secp256k1, _Gx, _Gy, 1, _r, generator=True -) - -# Brainpool P-160-r1 -_a = 0x340E7BE2A280EB74E2BE61BADA745D97E8F7C300 -_b = 0x1E589A8595423412134FAA2DBDEC95C8D8675E58 -_p = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620F -_Gx = 0xBED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3 -_Gy = 0x1667CB477A1A8EC338F94741669C976316DA6321 -_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 - -curve_brainpoolp160r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp160r1 = ellipticcurve.PointJacobi( - curve_brainpoolp160r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-160-t1 -_a = 0xE95E4A5F737059DC60DFC7AD95B3D8139515620C -_b = 0x7A556B6DAE535B7B51ED2C4D7DAA7A0B5C55F380 -# _z = 0x24DBFF5DEC9B986BBFE5295A29BFBAE45E0F5D0B -_Gx = 0xB199B13B9B34EFC1397E64BAEB05ACC265FF2378 -_Gy = 0xADD6718B7C7C1961F0991B842443772152C9E0AD -_q = 0xE95E4A5F737059DC60DF5991D45029409E60FC09 -curve_brainpoolp160t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp160t1 = ellipticcurve.PointJacobi( - curve_brainpoolp160t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-192-r1 -_a = 0x6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF -_b = 0x469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9 -_p = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297 -_Gx = 0xC0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6 -_Gy = 0x14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F -_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 - -curve_brainpoolp192r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp192r1 = ellipticcurve.PointJacobi( - curve_brainpoolp192r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-192-t1 -_a = 0xC302F41D932A36CDA7A3463093D18DB78FCE476DE1A86294 -_b = 0x13D56FFAEC78681E68F9DEB43B35BEC2FB68542E27897B79 -# _z = 0x1B6F5CC8DB4DC7AF19458A9CB80DC2295E5EB9C3732104CB -_Gx = 
0x3AE9E58C82F63C30282E1FE7BBF43FA72C446AF6F4618129 -_Gy = 0x097E2C5667C2223A902AB5CA449D0084B7E5B3DE7CCC01C9 -_q = 0xC302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1 - -curve_brainpoolp192t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp192t1 = ellipticcurve.PointJacobi( - curve_brainpoolp192t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-224-r1 -_a = 0x68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43 -_b = 0x2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B -_p = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF -_Gx = 0x0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D -_Gy = 0x58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD -_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F - -curve_brainpoolp224r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp224r1 = ellipticcurve.PointJacobi( - curve_brainpoolp224r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-224-t1 -_a = 0xD7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FC -_b = 0x4B337D934104CD7BEF271BF60CED1ED20DA14C08B3BB64F18A60888D -# _z = 0x2DF271E14427A346910CF7A2E6CFA7B3F484E5C2CCE1C8B730E28B3F -_Gx = 0x6AB1E344CE25FF3896424E7FFE14762ECB49F8928AC0C76029B4D580 -_Gy = 0x0374E9F5143E568CD23F3F4D7C0D4B1E41C8CC0D1C6ABD5F1A46DB4C -_q = 0xD7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F - -curve_brainpoolp224t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp224t1 = ellipticcurve.PointJacobi( - curve_brainpoolp224t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-256-r1 -_a = 0x7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9 -_b = 0x26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6 -_p = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377 -_Gx = 0x8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262 -_Gy = 0x547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997 -_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 - -curve_brainpoolp256r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp256r1 = ellipticcurve.PointJacobi( - curve_brainpoolp256r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-256-t1 -_a = 0xA9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5374 -_b = 0x662C61C430D84EA4FE66A7733D0B76B7BF93EBC4AF2F49256AE58101FEE92B04 -# _z = 0x3E2D4BD9597B58639AE7AA669CAB9837CF5CF20A2C852D10F655668DFC150EF0 -_Gx = 0xA3E8EB3CC1CFE7B7732213B23A656149AFA142C47AAFBC2B79A191562E1305F4 -_Gy = 0x2D996C823439C56D7F7B22E14644417E69BCB6DE39D027001DABE8F35B25C9BE -_q = 0xA9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7 - -curve_brainpoolp256t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp256t1 = ellipticcurve.PointJacobi( - curve_brainpoolp256t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-320-r1 -_a = int( - remove_whitespace( - """ - 3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9 - F492F375A97D860EB4""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - 520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD884539 - 816F5EB4AC8FB1F1A6""" - ), - 16, -) -_p = int( - remove_whitespace( - """ - D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC - 28FCD412B1F1B32E27""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - 43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599 - C710AF8D0D39E20611""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6A - C7D35245D1692E8EE1""" - ), - 
16, -) -_q = int( - remove_whitespace( - """ - D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 - E98691555B44C59311""" - ), - 16, -) - -curve_brainpoolp320r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp320r1 = ellipticcurve.PointJacobi( - curve_brainpoolp320r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-320-t1 -_a = int( - remove_whitespace( - """ - D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC - 28FCD412B1F1B32E24""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - A7F561E038EB1ED560B3D147DB782013064C19F27ED27C6780AAF77FB8A547 - CEB5B4FEF422340353""" - ), - 16, -) -# _z = int( -# remove_whitespace( -# """ -# 15F75CAF668077F7E85B42EB01F0A81FF56ECD6191D55CB82B7D861458A18F -# EFC3E5AB7496F3C7B1""" -# ), -# 16, -# ) -_Gx = int( - remove_whitespace( - """ - 925BE9FB01AFC6FB4D3E7D4990010F813408AB106C4F09CB7EE07868CC136F - FF3357F624A21BED52""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 63BA3A7A27483EBF6671DBEF7ABB30EBEE084E58A0B077AD42A5A0989D1EE7 - 1B1B9BC0455FB0D2C3""" - ), - 16, -) -_q = int( - remove_whitespace( - """ - D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658 - E98691555B44C59311""" - ), - 16, -) - -curve_brainpoolp320t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp320t1 = ellipticcurve.PointJacobi( - curve_brainpoolp320t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-384-r1 -_a = int( - remove_whitespace( - """ - 7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F9 - 0F8AA5814A503AD4EB04A8C7DD22CE2826""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - 04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62 - D57CB4390295DBC9943AB78696FA504C11""" - ), - 16, -) -_p = int( - remove_whitespace( - """ - 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 - 23ACD3A729901D1A71874700133107EC53""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - 1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10 - E8E826E03436D646AAEF87B2E247D4AF1E""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF991292 - 80E4646217791811142820341263C5315""" - ), - 16, -) -_q = int( - remove_whitespace( - """ - 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 - A7CF3AB6AF6B7FC3103B883202E9046565""" - ), - 16, -) - -curve_brainpoolp384r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp384r1 = ellipticcurve.PointJacobi( - curve_brainpoolp384r1, _Gx, _Gy, 1, _q, generator=True -) - -_a = int( - remove_whitespace( - """ - 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB711 - 23ACD3A729901D1A71874700133107EC50""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - 7F519EADA7BDA81BD826DBA647910F8C4B9346ED8CCDC64E4B1ABD11756DCE - 1D2074AA263B88805CED70355A33B471EE""" - ), - 16, -) -# _z = int( -# remove_whitespace( -# """ -# 41DFE8DD399331F7166A66076734A89CD0D2BCDB7D068E44E1F378F41ECBAE -# 97D2D63DBC87BCCDDCCC5DA39E8589291C""" -# ), -# 16, -# ) -_Gx = int( - remove_whitespace( - """ - 18DE98B02DB9A306F2AFCD7235F72A819B80AB12EBD653172476FECD462AAB - FFC4FF191B946A5F54D8D0AA2F418808CC""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 25AB056962D30651A114AFD2755AD336747F93475B7A1FCA3B88F2B6A208CC - FE469408584DC2B2912675BF5B9E582928""" - ), - 16, -) -_q = int( - remove_whitespace( - """ - 8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425 - A7CF3AB6AF6B7FC3103B883202E9046565""" - ), - 16, -) - -curve_brainpoolp384t1 = ellipticcurve.CurveFp(_p, _a, _b, 
1) -generator_brainpoolp384t1 = ellipticcurve.PointJacobi( - curve_brainpoolp384t1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-512-r1 -_a = int( - remove_whitespace( - """ - 7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863 - BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - 3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117 - A72BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723""" - ), - 16, -) -_p = int( - remove_whitespace( - """ - AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 - 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3""" - ), - 16, -) -_Gx = int( - remove_whitespace( - """ - 81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D009 - 8EFF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F81 - 11B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892""" - ), - 16, -) -_q = int( - remove_whitespace( - """ - AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 - 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" - ), - 16, -) - -curve_brainpoolp512r1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp512r1 = ellipticcurve.PointJacobi( - curve_brainpoolp512r1, _Gx, _Gy, 1, _q, generator=True -) - -# Brainpool P-512-t1 -_a = int( - remove_whitespace( - """ - AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 - 717D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F0""" - ), - 16, -) -_b = int( - remove_whitespace( - """ - 7CBBBCF9441CFAB76E1890E46884EAE321F70C0BCB4981527897504BEC3E36 - A62BCDFA2304976540F6450085F2DAE145C22553B465763689180EA2571867423E""" - ), - 16, -) -# _z = int( -# remove_whitespace( -# """ -# 12EE58E6764838B69782136F0F2D3BA06E27695716054092E60A80BEDB212B -# 64E585D90BCE13761F85C3F1D2A64E3BE8FEA2220F01EBA5EEB0F35DBD29D922AB""" -# ), -# 16, -# ) -_Gx = int( - remove_whitespace( - """ - 640ECE5C12788717B9C1BA06CBC2A6FEBA85842458C56DDE9DB1758D39C031 - 3D82BA51735CDB3EA499AA77A7D6943A64F7A3F25FE26F06B51BAA2696FA9035DA""" - ), - 16, -) -_Gy = int( - remove_whitespace( - """ - 5B534BD595F5AF0FA2C892376C84ACE1BB4E3019B71634C01131159CAE03CE - E9D9932184BEEF216BD71DF2DADF86A627306ECFF96DBB8BACE198B61E00F8B332""" - ), - 16, -) -_q = int( - remove_whitespace( - """ - AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA703308 - 70553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069""" - ), - 16, -) - -curve_brainpoolp512t1 = ellipticcurve.CurveFp(_p, _a, _b, 1) -generator_brainpoolp512t1 = ellipticcurve.PointJacobi( - curve_brainpoolp512t1, _Gx, _Gy, 1, _q, generator=True -) diff --git a/venv/lib/python3.12/site-packages/ecdsa/eddsa.py b/venv/lib/python3.12/site-packages/ecdsa/eddsa.py deleted file mode 100644 index 9769cfd8..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/eddsa.py +++ /dev/null @@ -1,252 +0,0 @@ -"""Implementation of Edwards Digital Signature Algorithm.""" - -import hashlib -from ._sha3 import shake_256 -from . 
import ellipticcurve -from ._compat import ( - remove_whitespace, - bit_length, - bytes_to_int, - int_to_bytes, - compat26_str, -) - -# edwards25519, defined in RFC7748 -_p = 2**255 - 19 -_a = -1 -_d = int( - remove_whitespace( - "370957059346694393431380835087545651895421138798432190163887855330" - "85940283555" - ) -) -_h = 8 - -_Gx = int( - remove_whitespace( - "151122213495354007725011514095885315114540126930418572060461132" - "83949847762202" - ) -) -_Gy = int( - remove_whitespace( - "463168356949264781694283940034751631413079938662562256157830336" - "03165251855960" - ) -) -_r = 2**252 + 0x14DEF9DEA2F79CD65812631A5CF5D3ED - - -def _sha512(data): - return hashlib.new("sha512", compat26_str(data)).digest() - - -curve_ed25519 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _sha512) -generator_ed25519 = ellipticcurve.PointEdwards( - curve_ed25519, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True -) - - -# edwards448, defined in RFC7748 -_p = 2**448 - 2**224 - 1 -_a = 1 -_d = -39081 % _p -_h = 4 - -_Gx = int( - remove_whitespace( - "224580040295924300187604334099896036246789641632564134246125461" - "686950415467406032909029192869357953282578032075146446173674602635" - "247710" - ) -) -_Gy = int( - remove_whitespace( - "298819210078481492676017930443930673437544040154080242095928241" - "372331506189835876003536878655418784733982303233503462500531545062" - "832660" - ) -) -_r = 2**446 - 0x8335DC163BB124B65129C96FDE933D8D723A70AADC873D6D54A7BB0D - - -def _shake256(data): - return shake_256(data, 114) - - -curve_ed448 = ellipticcurve.CurveEdTw(_p, _a, _d, _h, _shake256) -generator_ed448 = ellipticcurve.PointEdwards( - curve_ed448, _Gx, _Gy, 1, _Gx * _Gy % _p, _r, generator=True -) - - -class PublicKey(object): - """Public key for the Edwards Digital Signature Algorithm.""" - - def __init__(self, generator, public_key, public_point=None): - self.generator = generator - self.curve = generator.curve() - self.__encoded = public_key - # plus one for the sign bit and round up - self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 - if len(public_key) != self.baselen: - raise ValueError( - "Incorrect size of the public key, expected: {0} bytes".format( - self.baselen - ) - ) - if public_point: - self.__point = public_point - else: - self.__point = ellipticcurve.PointEdwards.from_bytes( - self.curve, public_key - ) - - def __eq__(self, other): - if isinstance(other, PublicKey): - return ( - self.curve == other.curve and self.__encoded == other.__encoded - ) - return NotImplemented - - def __ne__(self, other): - return not self == other - - @property - def point(self): - return self.__point - - @point.setter - def point(self, other): - if self.__point != other: - raise ValueError("Can't change the coordinates of the point") - self.__point = other - - def public_point(self): - return self.__point - - def public_key(self): - return self.__encoded - - def verify(self, data, signature): - """Verify a Pure EdDSA signature over data.""" - data = compat26_str(data) - if len(signature) != 2 * self.baselen: - raise ValueError( - "Invalid signature length, expected: {0} bytes".format( - 2 * self.baselen - ) - ) - R = ellipticcurve.PointEdwards.from_bytes( - self.curve, signature[: self.baselen] - ) - S = bytes_to_int(signature[self.baselen :], "little") - if S >= self.generator.order(): - raise ValueError("Invalid signature") - - dom = bytearray() - if self.curve == curve_ed448: - dom = bytearray(b"SigEd448" + b"\x00\x00") - - k = bytes_to_int( - self.curve.hash_func(dom + R.to_bytes() + self.__encoded + data), - 
"little", - ) - - if self.generator * S != self.__point * k + R: - raise ValueError("Invalid signature") - - return True - - -class PrivateKey(object): - """Private key for the Edwards Digital Signature Algorithm.""" - - def __init__(self, generator, private_key): - self.generator = generator - self.curve = generator.curve() - # plus one for the sign bit and round up - self.baselen = (bit_length(self.curve.p()) + 1 + 7) // 8 - if len(private_key) != self.baselen: - raise ValueError( - "Incorrect size of private key, expected: {0} bytes".format( - self.baselen - ) - ) - self.__private_key = bytes(private_key) - self.__h = bytearray(self.curve.hash_func(private_key)) - self.__public_key = None - - a = self.__h[: self.baselen] - a = self._key_prune(a) - scalar = bytes_to_int(a, "little") - self.__s = scalar - - @property - def private_key(self): - return self.__private_key - - def __eq__(self, other): - if isinstance(other, PrivateKey): - return ( - self.curve == other.curve - and self.__private_key == other.__private_key - ) - return NotImplemented - - def __ne__(self, other): - return not self == other - - def _key_prune(self, key): - # make sure the key is not in a small subgroup - h = self.curve.cofactor() - if h == 4: - h_log = 2 - elif h == 8: - h_log = 3 - else: - raise ValueError("Only cofactor 4 and 8 curves supported") - key[0] &= ~((1 << h_log) - 1) - - # ensure the highest bit is set but no higher - l = bit_length(self.curve.p()) - if l % 8 == 0: - key[-1] = 0 - key[-2] |= 0x80 - else: - key[-1] = key[-1] & (1 << (l % 8)) - 1 | 1 << (l % 8) - 1 - return key - - def public_key(self): - """Generate the public key based on the included private key""" - if self.__public_key: - return self.__public_key - - public_point = self.generator * self.__s - - self.__public_key = PublicKey( - self.generator, public_point.to_bytes(), public_point - ) - - return self.__public_key - - def sign(self, data): - """Perform a Pure EdDSA signature over data.""" - data = compat26_str(data) - A = self.public_key().public_key() - - prefix = self.__h[self.baselen :] - - dom = bytearray() - if self.curve == curve_ed448: - dom = bytearray(b"SigEd448" + b"\x00\x00") - - r = bytes_to_int(self.curve.hash_func(dom + prefix + data), "little") - R = (self.generator * r).to_bytes() - - k = bytes_to_int(self.curve.hash_func(dom + R + A + data), "little") - k %= self.generator.order() - - S = (r + k * self.__s) % self.generator.order() - - return R + int_to_bytes(S, self.baselen, "little") diff --git a/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py b/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py deleted file mode 100644 index a982c1e4..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/ellipticcurve.py +++ /dev/null @@ -1,1609 +0,0 @@ -#! /usr/bin/env python -# -*- coding: utf-8 -*- -# -# Implementation of elliptic curves, for cryptographic applications. -# -# This module doesn't provide any way to choose a random elliptic -# curve, nor to verify that an elliptic curve was chosen randomly, -# because one can simply use NIST's standard curves. -# -# Notes from X9.62-1998 (draft): -# Nomenclature: -# - Q is a public key. -# The "Elliptic Curve Domain Parameters" include: -# - q is the "field size", which in our case equals p. -# - p is a big prime. -# - G is a point of prime order (5.1.1.1). -# - n is the order of G (5.1.1.1). -# Public-key validation (5.2.2): -# - Verify that Q is not the point at infinity. -# - Verify that X_Q and Y_Q are in [0,p-1]. -# - Verify that Q is on the curve. 
-# - Verify that nQ is the point at infinity. -# Signature generation (5.3): -# - Pick random k from [1,n-1]. -# Signature checking (5.4.2): -# - Verify that r and s are in [1,n-1]. -# -# Revision history: -# 2005.12.31 - Initial version. -# 2008.11.25 - Change CurveFp.is_on to contains_point. -# -# Written in 2005 by Peter Pearson and placed in the public domain. -# Modified extensively as part of python-ecdsa. - -from __future__ import division - -try: - from gmpy2 import mpz - - GMPY = True -except ImportError: # pragma: no branch - try: - from gmpy import mpz - - GMPY = True - except ImportError: - GMPY = False - - -from six import python_2_unicode_compatible -from . import numbertheory -from ._compat import normalise_bytes, int_to_bytes, bit_length, bytes_to_int -from .errors import MalformedPointError -from .util import orderlen, string_to_number, number_to_string - - -@python_2_unicode_compatible -class CurveFp(object): - """ - :term:`Short Weierstrass Elliptic Curve ` over a - prime field. - """ - - if GMPY: # pragma: no branch - - def __init__(self, p, a, b, h=None): - """ - The curve of points satisfying y^2 = x^3 + a*x + b (mod p). - - h is an integer that is the cofactor of the elliptic curve domain - parameters; it is the number of points satisfying the elliptic - curve equation divided by the order of the base point. It is used - for selection of efficient algorithm for public point verification. - """ - self.__p = mpz(p) - self.__a = mpz(a) - self.__b = mpz(b) - # h is not used in calculations and it can be None, so don't use - # gmpy with it - self.__h = h - - else: # pragma: no branch - - def __init__(self, p, a, b, h=None): - """ - The curve of points satisfying y^2 = x^3 + a*x + b (mod p). - - h is an integer that is the cofactor of the elliptic curve domain - parameters; it is the number of points satisfying the elliptic - curve equation divided by the order of the base point. It is used - for selection of efficient algorithm for public point verification. - """ - self.__p = p - self.__a = a - self.__b = b - self.__h = h - - def __eq__(self, other): - """Return True if other is an identical curve, False otherwise. - - Note: the value of the cofactor of the curve is not taken into account - when comparing curves, as it's derived from the base point and - intrinsic curve characteristic (but it's complex to compute), - only the prime and curve parameters are considered. 
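# --- editor's note, not part of the deleted file ---
# Sketch of what CurveFp.contains_point() above computes: the short
# Weierstrass equation y^2 = x^3 + a*x + b (mod p), shown here on a toy
# curve chosen only for illustration.
from ecdsa.ellipticcurve import CurveFp

toy = CurveFp(23, 1, 1)             # y^2 = x^3 + x + 1 over GF(23)
assert toy.contains_point(3, 10)    # 10^2 = 100 = 8 and 3^3 + 3 + 1 = 31 = 8 (mod 23)
assert not toy.contains_point(3, 11)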
- """ - if isinstance(other, CurveFp): - p = self.__p - return ( - self.__p == other.__p - and self.__a % p == other.__a % p - and self.__b % p == other.__b % p - ) - return NotImplemented - - def __ne__(self, other): - """Return False if other is an identical curve, True otherwise.""" - return not self == other - - def __hash__(self): - return hash((self.__p, self.__a, self.__b)) - - def p(self): - return self.__p - - def a(self): - return self.__a - - def b(self): - return self.__b - - def cofactor(self): - return self.__h - - def contains_point(self, x, y): - """Is the point (x,y) on this curve?""" - return (y * y - ((x * x + self.__a) * x + self.__b)) % self.__p == 0 - - def __str__(self): - if self.__h is not None: - return "CurveFp(p={0}, a={1}, b={2}, h={3})".format( - self.__p, - self.__a, - self.__b, - self.__h, - ) - return "CurveFp(p={0}, a={1}, b={2})".format( - self.__p, - self.__a, - self.__b, - ) - - -class CurveEdTw(object): - """Parameters for a Twisted Edwards Elliptic Curve""" - - if GMPY: # pragma: no branch - - def __init__(self, p, a, d, h=None, hash_func=None): - """ - The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). - - h is the cofactor of the curve. - hash_func is the hash function associated with the curve - (like SHA-512 for Ed25519) - """ - self.__p = mpz(p) - self.__a = mpz(a) - self.__d = mpz(d) - self.__h = h - self.__hash_func = hash_func - - else: - - def __init__(self, p, a, d, h=None, hash_func=None): - """ - The curve of points satisfying a*x^2 + y^2 = 1 + d*x^2*y^2 (mod p). - - h is the cofactor of the curve. - hash_func is the hash function associated with the curve - (like SHA-512 for Ed25519) - """ - self.__p = p - self.__a = a - self.__d = d - self.__h = h - self.__hash_func = hash_func - - def __eq__(self, other): - """Returns True if other is an identical curve.""" - if isinstance(other, CurveEdTw): - p = self.__p - return ( - self.__p == other.__p - and self.__a % p == other.__a % p - and self.__d % p == other.__d % p - ) - return NotImplemented - - def __ne__(self, other): - """Return False if the other is an identical curve, True otherwise.""" - return not self == other - - def __hash__(self): - return hash((self.__p, self.__a, self.__d)) - - def contains_point(self, x, y): - """Is the point (x, y) on this curve?""" - return ( - self.__a * x * x + y * y - 1 - self.__d * x * x * y * y - ) % self.__p == 0 - - def p(self): - return self.__p - - def a(self): - return self.__a - - def d(self): - return self.__d - - def hash_func(self, data): - return self.__hash_func(data) - - def cofactor(self): - return self.__h - - def __str__(self): - if self.__h is not None: - return "CurveEdTw(p={0}, a={1}, d={2}, h={3})".format( - self.__p, - self.__a, - self.__d, - self.__h, - ) - return "CurveEdTw(p={0}, a={1}, d={2})".format( - self.__p, - self.__a, - self.__d, - ) - - -class AbstractPoint(object): - """Class for common methods of elliptic curve points.""" - - @staticmethod - def _from_raw_encoding(data, raw_encoding_length): - """ - Decode public point from :term:`raw encoding`. - - :term:`raw encoding` is the same as the :term:`uncompressed` encoding, - but without the 0x04 byte at the beginning. 
- """ - # real assert, from_bytes() should not call us with different length - assert len(data) == raw_encoding_length - xs = data[: raw_encoding_length // 2] - ys = data[raw_encoding_length // 2 :] - # real assert, raw_encoding_length is calculated by multiplying an - # integer by two so it will always be even - assert len(xs) == raw_encoding_length // 2 - assert len(ys) == raw_encoding_length // 2 - coord_x = string_to_number(xs) - coord_y = string_to_number(ys) - - return coord_x, coord_y - - @staticmethod - def _from_compressed(data, curve): - """Decode public point from compressed encoding.""" - if data[:1] not in (b"\x02", b"\x03"): - raise MalformedPointError("Malformed compressed point encoding") - - is_even = data[:1] == b"\x02" - x = string_to_number(data[1:]) - p = curve.p() - alpha = (pow(x, 3, p) + (curve.a() * x) + curve.b()) % p - try: - beta = numbertheory.square_root_mod_prime(alpha, p) - except numbertheory.Error as e: - raise MalformedPointError( - "Encoding does not correspond to a point on curve", e - ) - if is_even == bool(beta & 1): - y = p - beta - else: - y = beta - return x, y - - @classmethod - def _from_hybrid(cls, data, raw_encoding_length, validate_encoding): - """Decode public point from hybrid encoding.""" - # real assert, from_bytes() should not call us with different types - assert data[:1] in (b"\x06", b"\x07") - - # primarily use the uncompressed as it's easiest to handle - x, y = cls._from_raw_encoding(data[1:], raw_encoding_length) - - # but validate if it's self-consistent if we're asked to do that - if validate_encoding and ( - y & 1 - and data[:1] != b"\x07" - or (not y & 1) - and data[:1] != b"\x06" - ): - raise MalformedPointError("Inconsistent hybrid point encoding") - - return x, y - - @classmethod - def _from_edwards(cls, curve, data): - """Decode a point on an Edwards curve.""" - data = bytearray(data) - p = curve.p() - # add 1 for the sign bit and then round up - exp_len = (bit_length(p) + 1 + 7) // 8 - if len(data) != exp_len: - raise MalformedPointError("Point length doesn't match the curve.") - x_0 = (data[-1] & 0x80) >> 7 - - data[-1] &= 0x80 - 1 - - y = bytes_to_int(data, "little") - if GMPY: - y = mpz(y) - - x2 = ( - (y * y - 1) - * numbertheory.inverse_mod(curve.d() * y * y - curve.a(), p) - % p - ) - - try: - x = numbertheory.square_root_mod_prime(x2, p) - except numbertheory.Error as e: - raise MalformedPointError( - "Encoding does not correspond to a point on curve", e - ) - - if x % 2 != x_0: - x = -x % p - - return x, y - - @classmethod - def from_bytes( - cls, curve, data, validate_encoding=True, valid_encodings=None - ): - """ - Initialise the object from byte encoding of a point. - - The method does accept and automatically detect the type of point - encoding used. It supports the :term:`raw encoding`, - :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. - - Note: generally you will want to call the ``from_bytes()`` method of - either a child class, PointJacobi or Point. 
- - :param data: single point encoding of the public key - :type data: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lay - :type curve: ~ecdsa.ellipticcurve.CurveFp - :param validate_encoding: whether to verify that the encoding of the - point is self-consistent, defaults to True, has effect only - on ``hybrid`` encoding - :type validate_encoding: bool - :param valid_encodings: list of acceptable point encoding formats, - supported ones are: :term:`uncompressed`, :term:`compressed`, - :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` - name). All formats by default (specified with ``None``). - :type valid_encodings: :term:`set-like object` - - :raises `~ecdsa.errors.MalformedPointError`: if the public point does - not lay on the curve or the encoding is invalid - - :return: x and y coordinates of the encoded point - :rtype: tuple(int, int) - """ - if not valid_encodings: - valid_encodings = set( - ["uncompressed", "compressed", "hybrid", "raw"] - ) - if not all( - i in set(("uncompressed", "compressed", "hybrid", "raw")) - for i in valid_encodings - ): - raise ValueError( - "Only uncompressed, compressed, hybrid or raw encoding " - "supported." - ) - data = normalise_bytes(data) - - if isinstance(curve, CurveEdTw): - return cls._from_edwards(curve, data) - - key_len = len(data) - raw_encoding_length = 2 * orderlen(curve.p()) - if key_len == raw_encoding_length and "raw" in valid_encodings: - coord_x, coord_y = cls._from_raw_encoding( - data, raw_encoding_length - ) - elif key_len == raw_encoding_length + 1 and ( - "hybrid" in valid_encodings or "uncompressed" in valid_encodings - ): - if data[:1] in (b"\x06", b"\x07") and "hybrid" in valid_encodings: - coord_x, coord_y = cls._from_hybrid( - data, raw_encoding_length, validate_encoding - ) - elif data[:1] == b"\x04" and "uncompressed" in valid_encodings: - coord_x, coord_y = cls._from_raw_encoding( - data[1:], raw_encoding_length - ) - else: - raise MalformedPointError( - "Invalid X9.62 encoding of the public point" - ) - elif ( - key_len == raw_encoding_length // 2 + 1 - and "compressed" in valid_encodings - ): - coord_x, coord_y = cls._from_compressed(data, curve) - else: - raise MalformedPointError( - "Length of string does not match lengths of " - "any of the enabled ({0}) encodings of the " - "curve.".format(", ".join(valid_encodings)) - ) - return coord_x, coord_y - - def _raw_encode(self): - """Convert the point to the :term:`raw encoding`.""" - prime = self.curve().p() - x_str = number_to_string(self.x(), prime) - y_str = number_to_string(self.y(), prime) - return x_str + y_str - - def _compressed_encode(self): - """Encode the point into the compressed form.""" - prime = self.curve().p() - x_str = number_to_string(self.x(), prime) - if self.y() & 1: - return b"\x03" + x_str - return b"\x02" + x_str - - def _hybrid_encode(self): - """Encode the point into the hybrid form.""" - raw_enc = self._raw_encode() - if self.y() & 1: - return b"\x07" + raw_enc - return b"\x06" + raw_enc - - def _edwards_encode(self): - """Encode the point according to RFC8032 encoding.""" - self.scale() - x, y, p = self.x(), self.y(), self.curve().p() - - # add 1 for the sign bit and then round up - enc_len = (bit_length(p) + 1 + 7) // 8 - y_str = int_to_bytes(y, enc_len, "little") - if x % 2: - y_str[-1] |= 0x80 - return y_str - - def to_bytes(self, encoding="raw"): - """ - Convert the point to a byte string. - - The method by default uses the :term:`raw encoding` (specified - by `encoding="raw"`. 
It can also output points in :term:`uncompressed`, - :term:`compressed`, and :term:`hybrid` formats. - - For points on Edwards curves `encoding` is ignored and only the - encoding defined in RFC 8032 is supported. - - :return: :term:`raw encoding` of a public on the curve - :rtype: bytes - """ - assert encoding in ("raw", "uncompressed", "compressed", "hybrid") - curve = self.curve() - if isinstance(curve, CurveEdTw): - return self._edwards_encode() - elif encoding == "raw": - return self._raw_encode() - elif encoding == "uncompressed": - return b"\x04" + self._raw_encode() - elif encoding == "hybrid": - return self._hybrid_encode() - else: - return self._compressed_encode() - - @staticmethod - def _naf(mult): - """Calculate non-adjacent form of number.""" - ret = [] - while mult: - if mult % 2: - nd = mult % 4 - if nd >= 2: - nd -= 4 - ret.append(nd) - mult -= nd - else: - ret.append(0) - mult //= 2 - return ret - - -class PointJacobi(AbstractPoint): - """ - Point on a short Weierstrass elliptic curve. Uses Jacobi coordinates. - - In Jacobian coordinates, there are three parameters, X, Y and Z. - They correspond to affine parameters 'x' and 'y' like so: - - x = X / Z² - y = Y / Z³ - """ - - def __init__(self, curve, x, y, z, order=None, generator=False): - """ - Initialise a point that uses Jacobi representation internally. - - :param CurveFp curve: curve on which the point resides - :param int x: the X parameter of Jacobi representation (equal to x when - converting from affine coordinates - :param int y: the Y parameter of Jacobi representation (equal to y when - converting from affine coordinates - :param int z: the Z parameter of Jacobi representation (equal to 1 when - converting from affine coordinates - :param int order: the point order, must be non zero when using - generator=True - :param bool generator: the point provided is a curve generator, as - such, it will be commonly used with scalar multiplication. This will - cause to precompute multiplication table generation for it - """ - super(PointJacobi, self).__init__() - self.__curve = curve - if GMPY: # pragma: no branch - self.__coords = (mpz(x), mpz(y), mpz(z)) - self.__order = order and mpz(order) - else: # pragma: no branch - self.__coords = (x, y, z) - self.__order = order - self.__generator = generator - self.__precompute = [] - - @classmethod - def from_bytes( - cls, - curve, - data, - validate_encoding=True, - valid_encodings=None, - order=None, - generator=False, - ): - """ - Initialise the object from byte encoding of a point. - - The method does accept and automatically detect the type of point - encoding used. It supports the :term:`raw encoding`, - :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. - - :param data: single point encoding of the public key - :type data: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lay - :type curve: ~ecdsa.ellipticcurve.CurveFp - :param validate_encoding: whether to verify that the encoding of the - point is self-consistent, defaults to True, has effect only - on ``hybrid`` encoding - :type validate_encoding: bool - :param valid_encodings: list of acceptable point encoding formats, - supported ones are: :term:`uncompressed`, :term:`compressed`, - :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` - name). All formats by default (specified with ``None``). 
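# --- editor's note, not part of the deleted file ---
# The _naf() staticmethod above rewrites a scalar in non-adjacent form:
# digits in {-1, 0, 1}, least-significant first, never two non-zero digits
# in a row, which lets the multiplication loops substitute a cheap point
# negation for roughly half of the additions. Standalone copy for reference:
def naf(mult):
    ret = []
    while mult:
        if mult % 2:
            nd = mult % 4          # look at the two lowest bits
            if nd >= 2:
                nd -= 4            # encode 3 as -1 plus a carry
            ret.append(nd)
            mult -= nd
        else:
            ret.append(0)
        mult //= 2
    return ret

assert naf(7) == [-1, 0, 0, 1]     # 7 = -1 + 8
assert sum(d * 2**i for i, d in enumerate(naf(1234))) == 1234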
- :type valid_encodings: :term:`set-like object` - :param int order: the point order, must be non zero when using - generator=True - :param bool generator: the point provided is a curve generator, as - such, it will be commonly used with scalar multiplication. This - will cause to precompute multiplication table generation for it - - :raises `~ecdsa.errors.MalformedPointError`: if the public point does - not lay on the curve or the encoding is invalid - - :return: Point on curve - :rtype: PointJacobi - """ - coord_x, coord_y = super(PointJacobi, cls).from_bytes( - curve, data, validate_encoding, valid_encodings - ) - return PointJacobi(curve, coord_x, coord_y, 1, order, generator) - - def _maybe_precompute(self): - if not self.__generator or self.__precompute: - return - - # since this code will execute just once, and it's fully deterministic, - # depend on atomicity of the last assignment to switch from empty - # self.__precompute to filled one and just ignore the unlikely - # situation when two threads execute it at the same time (as it won't - # lead to inconsistent __precompute) - order = self.__order - assert order - precompute = [] - i = 1 - order *= 2 - coord_x, coord_y, coord_z = self.__coords - doubler = PointJacobi(self.__curve, coord_x, coord_y, coord_z, order) - order *= 2 - precompute.append((doubler.x(), doubler.y())) - - while i < order: - i *= 2 - doubler = doubler.double().scale() - precompute.append((doubler.x(), doubler.y())) - - self.__precompute = precompute - - def __getstate__(self): - # while this code can execute at the same time as _maybe_precompute() - # is updating the __precompute or scale() is updating the __coords, - # there is no requirement for consistency between __coords and - # __precompute - state = self.__dict__.copy() - return state - - def __setstate__(self, state): - self.__dict__.update(state) - - def __eq__(self, other): - """Compare for equality two points with each-other. - - Note: only points that lay on the same curve can be equal. - """ - x1, y1, z1 = self.__coords - if other is INFINITY: - return not z1 - if isinstance(other, Point): - x2, y2, z2 = other.x(), other.y(), 1 - elif isinstance(other, PointJacobi): - x2, y2, z2 = other.__coords - else: - return NotImplemented - if self.__curve != other.curve(): - return False - p = self.__curve.p() - - zz1 = z1 * z1 % p - zz2 = z2 * z2 % p - - # compare the fractions by bringing them to the same denominator - # depend on short-circuit to save 4 multiplications in case of - # inequality - return (x1 * zz2 - x2 * zz1) % p == 0 and ( - y1 * zz2 * z2 - y2 * zz1 * z1 - ) % p == 0 - - def __ne__(self, other): - """Compare for inequality two points with each-other.""" - return not self == other - - def order(self): - """Return the order of the point. - - None if it is undefined. - """ - return self.__order - - def curve(self): - """Return curve over which the point is defined.""" - return self.__curve - - def x(self): - """ - Return affine x coordinate. - - This method should be used only when the 'y' coordinate is not needed. - It's computationally more efficient to use `to_affine()` and then - call x() and y() on the returned instance. Or call `scale()` - and then x() and y() on the returned instance. - """ - x, _, z = self.__coords - if z == 1: - return x - p = self.__curve.p() - z = numbertheory.inverse_mod(z, p) - return x * z**2 % p - - def y(self): - """ - Return affine y coordinate. - - This method should be used only when the 'x' coordinate is not needed. 
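# --- editor's note, not part of the deleted file ---
# Jacobi coordinates represent affine (x, y) as (X, Y, Z) with x = X/Z^2 and
# y = Y/Z^3. The __eq__ above avoids modular division by cross-multiplying:
# x1 == x2  iff  X1*Z2^2 == X2*Z1^2 (mod p). Tiny numeric sketch:
p = 23
X1, Z1 = 3, 2        # encodes x = 3 * inverse(4)  = 18 (mod 23)
X2, Z2 = 12, 4       # encodes x = 12 * inverse(16) = 18 (mod 23)
assert X1 * Z2 * Z2 % p == X2 * Z1 * Z1 % p   # equal without any inversion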
- It's computationally more efficient to use `to_affine()` and then - call x() and y() on the returned instance. Or call `scale()` - and then x() and y() on the returned instance. - """ - _, y, z = self.__coords - if z == 1: - return y - p = self.__curve.p() - z = numbertheory.inverse_mod(z, p) - return y * z**3 % p - - def scale(self): - """ - Return point scaled so that z == 1. - - Modifies point in place, returns self. - """ - x, y, z = self.__coords - if z == 1: - return self - - # scaling is deterministic, so even if two threads execute the below - # code at the same time, they will set __coords to the same value - p = self.__curve.p() - z_inv = numbertheory.inverse_mod(z, p) - zz_inv = z_inv * z_inv % p - x = x * zz_inv % p - y = y * zz_inv * z_inv % p - self.__coords = (x, y, 1) - return self - - def to_affine(self): - """Return point in affine form.""" - _, _, z = self.__coords - p = self.__curve.p() - if not (z % p): - return INFINITY - self.scale() - x, y, z = self.__coords - assert z == 1 - return Point(self.__curve, x, y, self.__order) - - @staticmethod - def from_affine(point, generator=False): - """Create from an affine point. - - :param bool generator: set to True to make the point to precalculate - multiplication table - useful for public point when verifying many - signatures (around 100 or so) or for generator points of a curve. - """ - return PointJacobi( - point.curve(), point.x(), point.y(), 1, point.order(), generator - ) - - # please note that all the methods that use the equations from - # hyperelliptic - # are formatted in a way to maximise performance. - # Things that make code faster: multiplying instead of taking to the power - # (`xx = x * x; xxxx = xx * xx % p` is faster than `xxxx = x**4 % p` and - # `pow(x, 4, p)`), - # multiple assignments at the same time (`x1, x2 = self.x1, self.x2` is - # faster than `x1 = self.x1; x2 = self.x2`), - # similarly, sometimes the `% p` is skipped if it makes the calculation - # faster and the result of calculation is later reduced modulo `p` - - def _double_with_z_1(self, X1, Y1, p, a): - """Add a point to itself with z == 1.""" - # after: - # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-mdbl-2007-bl - XX, YY = X1 * X1 % p, Y1 * Y1 % p - if not YY: - return 0, 0, 0 - YYYY = YY * YY % p - S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p - M = 3 * XX + a - T = (M * M - 2 * S) % p - # X3 = T - Y3 = (M * (S - T) - 8 * YYYY) % p - Z3 = 2 * Y1 % p - return T, Y3, Z3 - - def _double(self, X1, Y1, Z1, p, a): - """Add a point to itself, arbitrary z.""" - if Z1 == 1: - return self._double_with_z_1(X1, Y1, p, a) - if not Z1: - return 0, 0, 0 - # after: - # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#doubling-dbl-2007-bl - XX, YY = X1 * X1 % p, Y1 * Y1 % p - if not YY: - return 0, 0, 0 - YYYY = YY * YY % p - ZZ = Z1 * Z1 % p - S = 2 * ((X1 + YY) ** 2 - XX - YYYY) % p - M = (3 * XX + a * ZZ * ZZ) % p - T = (M * M - 2 * S) % p - # X3 = T - Y3 = (M * (S - T) - 8 * YYYY) % p - Z3 = ((Y1 + Z1) ** 2 - YY - ZZ) % p - - return T, Y3, Z3 - - def double(self): - """Add a point to itself.""" - X1, Y1, Z1 = self.__coords - - if not Z1: - return INFINITY - - p, a = self.__curve.p(), self.__curve.a() - - X3, Y3, Z3 = self._double(X1, Y1, Z1, p, a) - - if not Z3: - return INFINITY - return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - - def _add_with_z_1(self, X1, Y1, X2, Y2, p): - """add points when both Z1 and Z2 equal 1""" - # after: - # 
http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-mmadd-2007-bl - H = X2 - X1 - HH = H * H - I = 4 * HH % p - J = H * I - r = 2 * (Y2 - Y1) - if not H and not r: - return self._double_with_z_1(X1, Y1, p, self.__curve.a()) - V = X1 * I - X3 = (r**2 - J - 2 * V) % p - Y3 = (r * (V - X3) - 2 * Y1 * J) % p - Z3 = 2 * H % p - return X3, Y3, Z3 - - def _add_with_z_eq(self, X1, Y1, Z1, X2, Y2, p): - """add points when Z1 == Z2""" - # after: - # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-zadd-2007-m - A = (X2 - X1) ** 2 % p - B = X1 * A % p - C = X2 * A - D = (Y2 - Y1) ** 2 % p - if not A and not D: - return self._double(X1, Y1, Z1, p, self.__curve.a()) - X3 = (D - B - C) % p - Y3 = ((Y2 - Y1) * (B - X3) - Y1 * (C - B)) % p - Z3 = Z1 * (X2 - X1) % p - return X3, Y3, Z3 - - def _add_with_z2_1(self, X1, Y1, Z1, X2, Y2, p): - """add points when Z2 == 1""" - # after: - # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-madd-2007-bl - Z1Z1 = Z1 * Z1 % p - U2, S2 = X2 * Z1Z1 % p, Y2 * Z1 * Z1Z1 % p - H = (U2 - X1) % p - HH = H * H % p - I = 4 * HH % p - J = H * I - r = 2 * (S2 - Y1) % p - if not r and not H: - return self._double_with_z_1(X2, Y2, p, self.__curve.a()) - V = X1 * I - X3 = (r * r - J - 2 * V) % p - Y3 = (r * (V - X3) - 2 * Y1 * J) % p - Z3 = ((Z1 + H) ** 2 - Z1Z1 - HH) % p - return X3, Y3, Z3 - - def _add_with_z_ne(self, X1, Y1, Z1, X2, Y2, Z2, p): - """add points with arbitrary z""" - # after: - # http://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian.html#addition-add-2007-bl - Z1Z1 = Z1 * Z1 % p - Z2Z2 = Z2 * Z2 % p - U1 = X1 * Z2Z2 % p - U2 = X2 * Z1Z1 % p - S1 = Y1 * Z2 * Z2Z2 % p - S2 = Y2 * Z1 * Z1Z1 % p - H = U2 - U1 - I = 4 * H * H % p - J = H * I % p - r = 2 * (S2 - S1) % p - if not H and not r: - return self._double(X1, Y1, Z1, p, self.__curve.a()) - V = U1 * I - X3 = (r * r - J - 2 * V) % p - Y3 = (r * (V - X3) - 2 * S1 * J) % p - Z3 = ((Z1 + Z2) ** 2 - Z1Z1 - Z2Z2) * H % p - - return X3, Y3, Z3 - - def __radd__(self, other): - """Add other to self.""" - return self + other - - def _add(self, X1, Y1, Z1, X2, Y2, Z2, p): - """add two points, select fastest method.""" - if not Z1: - return X2 % p, Y2 % p, Z2 % p - if not Z2: - return X1 % p, Y1 % p, Z1 % p - if Z1 == Z2: - if Z1 == 1: - return self._add_with_z_1(X1, Y1, X2, Y2, p) - return self._add_with_z_eq(X1, Y1, Z1, X2, Y2, p) - if Z1 == 1: - return self._add_with_z2_1(X2, Y2, Z2, X1, Y1, p) - if Z2 == 1: - return self._add_with_z2_1(X1, Y1, Z1, X2, Y2, p) - return self._add_with_z_ne(X1, Y1, Z1, X2, Y2, Z2, p) - - def __add__(self, other): - """Add two points on elliptic curve.""" - if self == INFINITY: - return other - if other == INFINITY: - return self - if isinstance(other, Point): - other = PointJacobi.from_affine(other) - if self.__curve != other.__curve: - raise ValueError("The other point is on different curve") - - p = self.__curve.p() - X1, Y1, Z1 = self.__coords - X2, Y2, Z2 = other.__coords - - X3, Y3, Z3 = self._add(X1, Y1, Z1, X2, Y2, Z2, p) - - if not Z3: - return INFINITY - return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - - def __rmul__(self, other): - """Multiply point by an integer.""" - return self * other - - def _mul_precompute(self, other): - """Multiply point by integer with precomputation table.""" - X3, Y3, Z3, p = 0, 0, 0, self.__curve.p() - _add = self._add - for X2, Y2 in self.__precompute: - if other % 2: - if other % 4 >= 2: - other = (other + 1) // 2 - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) - else: - other = (other 
- 1) // 2 - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) - else: - other //= 2 - - if not Z3: - return INFINITY - return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - - def __mul__(self, other): - """Multiply point by an integer.""" - if not self.__coords[1] or not other: - return INFINITY - if other == 1: - return self - if self.__order: - # order*2 as a protection for Minerva - other = other % (self.__order * 2) - self._maybe_precompute() - if self.__precompute: - return self._mul_precompute(other) - - self = self.scale() - X2, Y2, _ = self.__coords - X3, Y3, Z3 = 0, 0, 0 - p, a = self.__curve.p(), self.__curve.a() - _double = self._double - _add = self._add - # since adding points when at least one of them is scaled - # is quicker, reverse the NAF order - for i in reversed(self._naf(other)): - X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) - if i < 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, 1, p) - elif i > 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, 1, p) - - if not Z3: - return INFINITY - - return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - - def mul_add(self, self_mul, other, other_mul): - """ - Do two multiplications at the same time, add results. - - calculates self*self_mul + other*other_mul - """ - if other == INFINITY or other_mul == 0: - return self * self_mul - if self_mul == 0: - return other * other_mul - if not isinstance(other, PointJacobi): - other = PointJacobi.from_affine(other) - # when the points have precomputed answers, then multiplying them alone - # is faster (as it uses NAF and no point doublings) - self._maybe_precompute() - other._maybe_precompute() - if self.__precompute and other.__precompute: - return self * self_mul + other * other_mul - - if self.__order: - self_mul = self_mul % self.__order - other_mul = other_mul % self.__order - - # (X3, Y3, Z3) is the accumulator - X3, Y3, Z3 = 0, 0, 0 - p, a = self.__curve.p(), self.__curve.a() - - # as we have 6 unique points to work with, we can't scale all of them, - # but do scale the ones that are used most often - self.scale() - X1, Y1, Z1 = self.__coords - other.scale() - X2, Y2, Z2 = other.__coords - - _double = self._double - _add = self._add - - # with NAF we have 3 options: no add, subtract, add - # so with 2 points, we have 9 combinations: - # 0, -A, +A, -B, -A-B, +A-B, +B, -A+B, +A+B - # so we need 4 combined points: - mAmB_X, mAmB_Y, mAmB_Z = _add(X1, -Y1, Z1, X2, -Y2, Z2, p) - pAmB_X, pAmB_Y, pAmB_Z = _add(X1, Y1, Z1, X2, -Y2, Z2, p) - mApB_X, mApB_Y, mApB_Z = pAmB_X, -pAmB_Y, pAmB_Z - pApB_X, pApB_Y, pApB_Z = mAmB_X, -mAmB_Y, mAmB_Z - # when the self and other sum to infinity, we need to add them - # one by one to get correct result but as that's very unlikely to - # happen in regular operation, we don't need to optimise this case - if not pApB_Z: - return self * self_mul + other * other_mul - - # gmp object creation has cumulatively higher overhead than the - # speedup we get from calculating the NAF using gmp so ensure use - # of int() - self_naf = list(reversed(self._naf(int(self_mul)))) - other_naf = list(reversed(self._naf(int(other_mul)))) - # ensure that the lists are the same length (zip() will truncate - # longer one otherwise) - if len(self_naf) < len(other_naf): - self_naf = [0] * (len(other_naf) - len(self_naf)) + self_naf - elif len(self_naf) > len(other_naf): - other_naf = [0] * (len(self_naf) - len(other_naf)) + other_naf - - for A, B in zip(self_naf, other_naf): - X3, Y3, Z3 = _double(X3, Y3, Z3, p, a) - - # conditions ordered from most to least likely - if A == 0: - if B == 0: 
- pass - elif B < 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, -Y2, Z2, p) - else: - assert B > 0 - X3, Y3, Z3 = _add(X3, Y3, Z3, X2, Y2, Z2, p) - elif A < 0: - if B == 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, X1, -Y1, Z1, p) - elif B < 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, mAmB_X, mAmB_Y, mAmB_Z, p) - else: - assert B > 0 - X3, Y3, Z3 = _add(X3, Y3, Z3, mApB_X, mApB_Y, mApB_Z, p) - else: - assert A > 0 - if B == 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, X1, Y1, Z1, p) - elif B < 0: - X3, Y3, Z3 = _add(X3, Y3, Z3, pAmB_X, pAmB_Y, pAmB_Z, p) - else: - assert B > 0 - X3, Y3, Z3 = _add(X3, Y3, Z3, pApB_X, pApB_Y, pApB_Z, p) - - if not Z3: - return INFINITY - - return PointJacobi(self.__curve, X3, Y3, Z3, self.__order) - - def __neg__(self): - """Return negated point.""" - x, y, z = self.__coords - return PointJacobi(self.__curve, x, -y, z, self.__order) - - -class Point(AbstractPoint): - """A point on a short Weierstrass elliptic curve. Altering x and y is - forbidden, but they can be read by the x() and y() methods.""" - - def __init__(self, curve, x, y, order=None): - """curve, x, y, order; order (optional) is the order of this point.""" - super(Point, self).__init__() - self.__curve = curve - if GMPY: - self.__x = x and mpz(x) - self.__y = y and mpz(y) - self.__order = order and mpz(order) - else: - self.__x = x - self.__y = y - self.__order = order - # self.curve is allowed to be None only for INFINITY: - if self.__curve: - assert self.__curve.contains_point(x, y) - # for curves with cofactor 1, all points that are on the curve are - # scalar multiples of the base point, so performing multiplication is - # not necessary to verify that. See Section 3.2.2.1 of SEC 1 v2 - if curve and curve.cofactor() != 1 and order: - assert self * order == INFINITY - - @classmethod - def from_bytes( - cls, - curve, - data, - validate_encoding=True, - valid_encodings=None, - order=None, - ): - """ - Initialise the object from byte encoding of a point. - - The method does accept and automatically detect the type of point - encoding used. It supports the :term:`raw encoding`, - :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. - - :param data: single point encoding of the public key - :type data: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lay - :type curve: ~ecdsa.ellipticcurve.CurveFp - :param validate_encoding: whether to verify that the encoding of the - point is self-consistent, defaults to True, has effect only - on ``hybrid`` encoding - :type validate_encoding: bool - :param valid_encodings: list of acceptable point encoding formats, - supported ones are: :term:`uncompressed`, :term:`compressed`, - :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` - name). All formats by default (specified with ``None``). - :type valid_encodings: :term:`set-like object` - :param int order: the point order, must be non zero when using - generator=True - - :raises `~ecdsa.errors.MalformedPointError`: if the public point does - not lay on the curve or the encoding is invalid - - :return: Point on curve - :rtype: Point - """ - coord_x, coord_y = super(Point, cls).from_bytes( - curve, data, validate_encoding, valid_encodings - ) - return Point(curve, coord_x, coord_y, order) - - def __eq__(self, other): - """Return True if the points are identical, False otherwise. - - Note: only points that lay on the same curve can be equal. 
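# --- editor's note, not part of the deleted file ---
# mul_add() above is Shamir's trick: the two scalar multiplications in
# u1*G + u2*Q are interleaved so the combined cost is close to a single
# multiplication; this is what makes ECDSA verification fast. Identity check:
from ecdsa.curves import NIST256p

G = NIST256p.generator
Q = G * 12345                       # stand-in for a public key point
u1, u2 = 1111, 2222
assert G.mul_add(u1, Q, u2) == G * u1 + Q * u2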
- """ - if other is INFINITY: - return self.__x is None or self.__y is None - if isinstance(other, Point): - return ( - self.__curve == other.__curve - and self.__x == other.__x - and self.__y == other.__y - ) - return NotImplemented - - def __ne__(self, other): - """Returns False if points are identical, True otherwise.""" - return not self == other - - def __neg__(self): - return Point(self.__curve, self.__x, self.__curve.p() - self.__y) - - def __add__(self, other): - """Add one point to another point.""" - - # X9.62 B.3: - - if not isinstance(other, Point): - return NotImplemented - if other == INFINITY: - return self - if self == INFINITY: - return other - assert self.__curve == other.__curve - if self.__x == other.__x: - if (self.__y + other.__y) % self.__curve.p() == 0: - return INFINITY - else: - return self.double() - - p = self.__curve.p() - - l = ( - (other.__y - self.__y) - * numbertheory.inverse_mod(other.__x - self.__x, p) - ) % p - - x3 = (l * l - self.__x - other.__x) % p - y3 = (l * (self.__x - x3) - self.__y) % p - - return Point(self.__curve, x3, y3) - - def __mul__(self, other): - """Multiply a point by an integer.""" - - def leftmost_bit(x): - assert x > 0 - result = 1 - while result <= x: - result = 2 * result - return result // 2 - - e = other - if e == 0 or (self.__order and e % self.__order == 0): - return INFINITY - if self == INFINITY: - return INFINITY - if e < 0: - return (-self) * (-e) - - # From X9.62 D.3.2: - - e3 = 3 * e - negative_self = Point( - self.__curve, - self.__x, - (-self.__y) % self.__curve.p(), - self.__order, - ) - i = leftmost_bit(e3) // 2 - result = self - # print("Multiplying %s by %d (e3 = %d):" % (self, other, e3)) - while i > 1: - result = result.double() - if (e3 & i) != 0 and (e & i) == 0: - result = result + self - if (e3 & i) == 0 and (e & i) != 0: - result = result + negative_self - # print(". . . i = %d, result = %s" % ( i, result )) - i = i // 2 - - return result - - def __rmul__(self, other): - """Multiply a point by an integer.""" - - return self * other - - def __str__(self): - if self == INFINITY: - return "infinity" - return "(%d,%d)" % (self.__x, self.__y) - - def double(self): - """Return a new point that is twice the old.""" - if self == INFINITY: - return INFINITY - - # X9.62 B.3: - - p = self.__curve.p() - a = self.__curve.a() - - l = ( - (3 * self.__x * self.__x + a) - * numbertheory.inverse_mod(2 * self.__y, p) - ) % p - - if not l: - return INFINITY - - x3 = (l * l - 2 * self.__x) % p - y3 = (l * (self.__x - x3) - self.__y) % p - - return Point(self.__curve, x3, y3) - - def x(self): - return self.__x - - def y(self): - return self.__y - - def curve(self): - return self.__curve - - def order(self): - return self.__order - - -class PointEdwards(AbstractPoint): - """Point on Twisted Edwards curve. - - Internally represents the coordinates on the curve using four parameters, - X, Y, Z, T. They correspond to affine parameters 'x' and 'y' like so: - - x = X / Z - y = Y / Z - x*y = T / Z - """ - - def __init__(self, curve, x, y, z, t, order=None, generator=False): - """ - Initialise a point that uses the extended coordinates internally. 
- """ - super(PointEdwards, self).__init__() - self.__curve = curve - if GMPY: # pragma: no branch - self.__coords = (mpz(x), mpz(y), mpz(z), mpz(t)) - self.__order = order and mpz(order) - else: # pragma: no branch - self.__coords = (x, y, z, t) - self.__order = order - self.__generator = generator - self.__precompute = [] - - @classmethod - def from_bytes( - cls, - curve, - data, - validate_encoding=None, - valid_encodings=None, - order=None, - generator=False, - ): - """ - Initialise the object from byte encoding of a point. - - `validate_encoding` and `valid_encodings` are provided for - compatibility with Weierstrass curves, they are ignored for Edwards - points. - - :param data: single point encoding of the public key - :type data: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lay - :type curve: ecdsa.ellipticcurve.CurveEdTw - :param None validate_encoding: Ignored, encoding is always validated - :param None valid_encodings: Ignored, there is just one encoding - supported - :param int order: the point order, must be non zero when using - generator=True - :param bool generator: Flag to mark the point as a curve generator, - this will cause the library to pre-compute some values to - make repeated usages of the point much faster - - :raises `~ecdsa.errors.MalformedPointError`: if the public point does - not lay on the curve or the encoding is invalid - - :return: Initialised point on an Edwards curve - :rtype: PointEdwards - """ - coord_x, coord_y = super(PointEdwards, cls).from_bytes( - curve, data, validate_encoding, valid_encodings - ) - return PointEdwards( - curve, coord_x, coord_y, 1, coord_x * coord_y, order, generator - ) - - def _maybe_precompute(self): - if not self.__generator or self.__precompute: - return self.__precompute - - # since this code will execute just once, and it's fully deterministic, - # depend on atomicity of the last assignment to switch from empty - # self.__precompute to filled one and just ignore the unlikely - # situation when two threads execute it at the same time (as it won't - # lead to inconsistent __precompute) - order = self.__order - assert order - precompute = [] - i = 1 - order *= 2 - coord_x, coord_y, coord_z, coord_t = self.__coords - prime = self.__curve.p() - - doubler = PointEdwards( - self.__curve, coord_x, coord_y, coord_z, coord_t, order - ) - # for "protection" against Minerva we need 1 or 2 more bits depending - # on order bit size, but it's easier to just calculate one - # point more always - order *= 4 - - while i < order: - doubler = doubler.scale() - coord_x, coord_y = doubler.x(), doubler.y() - coord_t = coord_x * coord_y % prime - precompute.append((coord_x, coord_y, coord_t)) - - i *= 2 - doubler = doubler.double() - - self.__precompute = precompute - return self.__precompute - - def x(self): - """Return affine x coordinate.""" - X1, _, Z1, _ = self.__coords - if Z1 == 1: - return X1 - p = self.__curve.p() - z_inv = numbertheory.inverse_mod(Z1, p) - return X1 * z_inv % p - - def y(self): - """Return affine y coordinate.""" - _, Y1, Z1, _ = self.__coords - if Z1 == 1: - return Y1 - p = self.__curve.p() - z_inv = numbertheory.inverse_mod(Z1, p) - return Y1 * z_inv % p - - def curve(self): - """Return the curve of the point.""" - return self.__curve - - def order(self): - return self.__order - - def scale(self): - """ - Return point scaled so that z == 1. - - Modifies point in place, returns self. 
- """ - X1, Y1, Z1, _ = self.__coords - if Z1 == 1: - return self - - p = self.__curve.p() - z_inv = numbertheory.inverse_mod(Z1, p) - x = X1 * z_inv % p - y = Y1 * z_inv % p - t = x * y % p - self.__coords = (x, y, 1, t) - return self - - def __eq__(self, other): - """Compare for equality two points with each-other. - - Note: only points on the same curve can be equal. - """ - x1, y1, z1, t1 = self.__coords - if other is INFINITY: - return not x1 or not t1 - if isinstance(other, PointEdwards): - x2, y2, z2, t2 = other.__coords - else: - return NotImplemented - if self.__curve != other.curve(): - return False - p = self.__curve.p() - - # cross multiply to eliminate divisions - xn1 = x1 * z2 % p - xn2 = x2 * z1 % p - yn1 = y1 * z2 % p - yn2 = y2 * z1 % p - return xn1 == xn2 and yn1 == yn2 - - def __ne__(self, other): - """Compare for inequality two points with each-other.""" - return not self == other - - def _add(self, X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a): - """add two points, assume sane parameters.""" - # after add-2008-hwcd-2 - # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html - # NOTE: there are more efficient formulas for Z1 or Z2 == 1 - A = X1 * X2 % p - B = Y1 * Y2 % p - C = Z1 * T2 % p - D = T1 * Z2 % p - E = D + C - F = ((X1 - Y1) * (X2 + Y2) + B - A) % p - G = B + a * A - H = D - C - if not H: - return self._double(X1, Y1, Z1, T1, p, a) - X3 = E * F % p - Y3 = G * H % p - T3 = E * H % p - Z3 = F * G % p - - return X3, Y3, Z3, T3 - - def __add__(self, other): - """Add point to another.""" - if other == INFINITY: - return self - if ( - not isinstance(other, PointEdwards) - or self.__curve != other.__curve - ): - raise ValueError("The other point is on a different curve.") - - p, a = self.__curve.p(), self.__curve.a() - X1, Y1, Z1, T1 = self.__coords - X2, Y2, Z2, T2 = other.__coords - - X3, Y3, Z3, T3 = self._add(X1, Y1, Z1, T1, X2, Y2, Z2, T2, p, a) - - if not X3 or not T3: - return INFINITY - return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) - - def __radd__(self, other): - """Add other to self.""" - return self + other - - def _double(self, X1, Y1, Z1, T1, p, a): - """Double the point, assume sane parameters.""" - # after "dbl-2008-hwcd" - # from https://hyperelliptic.org/EFD/g1p/auto-twisted-extended.html - # NOTE: there are more efficient formulas for Z1 == 1 - A = X1 * X1 % p - B = Y1 * Y1 % p - C = 2 * Z1 * Z1 % p - D = a * A % p - E = ((X1 + Y1) * (X1 + Y1) - A - B) % p - G = D + B - F = G - C - H = D - B - X3 = E * F % p - Y3 = G * H % p - T3 = E * H % p - Z3 = F * G % p - - return X3, Y3, Z3, T3 - - def double(self): - """Return point added to itself.""" - X1, Y1, Z1, T1 = self.__coords - - if not X1 or not T1: - return INFINITY - - p, a = self.__curve.p(), self.__curve.a() - - X3, Y3, Z3, T3 = self._double(X1, Y1, Z1, T1, p, a) - - # both Ed25519 and Ed448 have prime order, so no point added to - # itself will equal zero - if not X3 or not T3: # pragma: no branch - return INFINITY - return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) - - def __rmul__(self, other): - """Multiply point by an integer.""" - return self * other - - def _mul_precompute(self, other): - """Multiply point by integer with precomputation table.""" - X3, Y3, Z3, T3, p, a = 0, 1, 1, 0, self.__curve.p(), self.__curve.a() - _add = self._add - for X2, Y2, T2 in self.__precompute: - rem = other % 4 - if rem == 0 or rem == 2: - other //= 2 - elif rem == 3: - other = (other + 1) // 2 - X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, 1, -T2, p, a) - else: - assert 
rem == 1 - other = (other - 1) // 2 - X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, 1, T2, p, a) - - if not X3 or not T3: - return INFINITY - - return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) - - def __mul__(self, other): - """Multiply point by an integer.""" - X2, Y2, Z2, T2 = self.__coords - if not X2 or not T2 or not other: - return INFINITY - if other == 1: - return self - if self.__order: - # order*2 as a "protection" for Minerva - other = other % (self.__order * 2) - if self._maybe_precompute(): - return self._mul_precompute(other) - - X3, Y3, Z3, T3 = 0, 1, 1, 0 # INFINITY in extended coordinates - p, a = self.__curve.p(), self.__curve.a() - _double = self._double - _add = self._add - - for i in reversed(self._naf(other)): - X3, Y3, Z3, T3 = _double(X3, Y3, Z3, T3, p, a) - if i < 0: - X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, -X2, Y2, Z2, -T2, p, a) - elif i > 0: - X3, Y3, Z3, T3 = _add(X3, Y3, Z3, T3, X2, Y2, Z2, T2, p, a) - - if not X3 or not T3: - return INFINITY - - return PointEdwards(self.__curve, X3, Y3, Z3, T3, self.__order) - - -# This one point is the Point At Infinity for all purposes: -INFINITY = Point(None, None, None) diff --git a/venv/lib/python3.12/site-packages/ecdsa/errors.py b/venv/lib/python3.12/site-packages/ecdsa/errors.py deleted file mode 100644 index 0184c05b..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/errors.py +++ /dev/null @@ -1,4 +0,0 @@ -class MalformedPointError(AssertionError): - """Raised in case the encoding of private or public key is malformed.""" - - pass diff --git a/venv/lib/python3.12/site-packages/ecdsa/keys.py b/venv/lib/python3.12/site-packages/ecdsa/keys.py deleted file mode 100644 index f74252c7..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/keys.py +++ /dev/null @@ -1,1631 +0,0 @@ -""" -Primary classes for performing signing and verification operations. -""" - -import binascii -from hashlib import sha1 -import os -from six import PY2 -from . import ecdsa, eddsa -from . import der, ssh -from . import rfc6979 -from . import ellipticcurve -from .curves import NIST192p, Curve, Ed25519, Ed448 -from .ecdsa import RSZeroError -from .util import string_to_number, number_to_string, randrange -from .util import sigencode_string, sigdecode_string, bit_length -from .util import ( - oid_ecPublicKey, - encoded_oid_ecPublicKey, - oid_ecDH, - oid_ecMQV, - MalformedSignature, -) -from ._compat import normalise_bytes -from .errors import MalformedPointError -from .ellipticcurve import PointJacobi, CurveEdTw - - -__all__ = [ - "BadSignatureError", - "BadDigestError", - "VerifyingKey", - "SigningKey", - "MalformedPointError", -] - - -class BadSignatureError(Exception): - """ - Raised when verification of signature failed. 
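# --- editor's note, not part of the deleted file ---
# INFINITY above is the group identity: adding it is a no-op, and multiplying
# the base point by the group order lands on it. Sketch on P-256:
from ecdsa.curves import NIST256p
from ecdsa.ellipticcurve import INFINITY

G = NIST256p.generator
assert G + INFINITY == G
assert G * NIST256p.order == INFINITY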
- - Will be raised irrespective of reason of the failure: - - * the calculated or provided hash does not match the signature - * the signature does not match the curve/public key - * the encoding of the signature is malformed - * the size of the signature does not match the curve of the VerifyingKey - """ - - pass - - -class BadDigestError(Exception): - """Raised in case the selected hash is too large for the curve.""" - - pass - - -def _truncate_and_convert_digest(digest, curve, allow_truncate): - """Truncates and converts digest to an integer.""" - if not allow_truncate: - if len(digest) > curve.baselen: - raise BadDigestError( - "this curve ({0}) is too short " - "for the length of your digest ({1})".format( - curve.name, 8 * len(digest) - ) - ) - else: - digest = digest[: curve.baselen] - number = string_to_number(digest) - if allow_truncate: - max_length = bit_length(curve.order) - # we don't use bit_length(number) as that truncates leading zeros - length = len(digest) * 8 - - # See NIST FIPS 186-4: - # - # When the length of the output of the hash function is greater - # than N (i.e., the bit length of q), then the leftmost N bits of - # the hash function output block shall be used in any calculation - # using the hash function output during the generation or - # verification of a digital signature. - # - # as such, we need to shift-out the low-order bits: - number >>= max(0, length - max_length) - - return number - - -class VerifyingKey(object): - """ - Class for handling keys that can verify signatures (public keys). - - :ivar `~ecdsa.curves.Curve` ~.curve: The Curve over which all the - cryptographic operations will take place - :ivar default_hashfunc: the function that will be used for hashing the - data. Should implement the same API as hashlib.sha1 - :vartype default_hashfunc: callable - :ivar pubkey: the actual public key - :vartype pubkey: ~ecdsa.ecdsa.Public_key - """ - - def __init__(self, _error__please_use_generate=None): - """Unsupported, please use one of the classmethods to initialise.""" - if not _error__please_use_generate: - raise TypeError( - "Please use VerifyingKey.generate() to construct me" - ) - self.curve = None - self.default_hashfunc = None - self.pubkey = None - - def __repr__(self): - pub_key = self.to_string("compressed") - if self.default_hashfunc: - hash_name = self.default_hashfunc().name - else: - hash_name = "None" - return "VerifyingKey.from_string({0!r}, {1!r}, {2})".format( - pub_key, self.curve, hash_name - ) - - def __eq__(self, other): - """Return True if the points are identical, False otherwise.""" - if isinstance(other, VerifyingKey): - return self.curve == other.curve and self.pubkey == other.pubkey - return NotImplemented - - def __ne__(self, other): - """Return False if the points are identical, True otherwise.""" - return not self == other - - @classmethod - def from_public_point( - cls, point, curve=NIST192p, hashfunc=sha1, validate_point=True - ): - """ - Initialise the object from a Point object. - - This is a low-level method, generally you will not want to use it. 
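# --- editor's note, not part of the deleted file ---
# Sketch of the FIPS 186-4 truncation rule implemented by
# _truncate_and_convert_digest() above: when the hash output is longer than
# the bit length of the curve order, only the leftmost N bits are kept.
import hashlib

order_bits = 256                                   # e.g. NIST256p
digest = hashlib.sha512(b"data").digest()          # 512-bit digest
number = int.from_bytes(digest, "big")
number >>= max(0, len(digest) * 8 - order_bits)    # keep leftmost 256 bits
assert number.bit_length() <= order_bits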
- - :param point: The point to wrap around, the actual public key - :type point: ~ecdsa.ellipticcurve.AbstractPoint - :param curve: The curve on which the point needs to reside, defaults - to NIST192p - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface - as :py:class:`hashlib.sha1` - :type hashfunc: callable - :type bool validate_point: whether to check if the point lays on curve - should always be used if the public point is not a result - of our own calculation - - :raises MalformedPointError: if the public point does not lay on the - curve - - :return: Initialised VerifyingKey object - :rtype: VerifyingKey - """ - self = cls(_error__please_use_generate=True) - if isinstance(curve.curve, CurveEdTw): - raise ValueError("Method incompatible with Edwards curves") - if not isinstance(point, ellipticcurve.PointJacobi): - point = ellipticcurve.PointJacobi.from_affine(point) - self.curve = curve - self.default_hashfunc = hashfunc - try: - self.pubkey = ecdsa.Public_key( - curve.generator, point, validate_point - ) - except ecdsa.InvalidPointError: - raise MalformedPointError("Point does not lay on the curve") - self.pubkey.order = curve.order - return self - - def precompute(self, lazy=False): - """ - Precompute multiplication tables for faster signature verification. - - Calling this method will cause the library to precompute the - scalar multiplication tables, used in signature verification. - While it's an expensive operation (comparable to performing - as many signatures as the bit size of the curve, i.e. 256 for NIST256p) - it speeds up verification 2 times. You should call this method - if you expect to verify hundreds of signatures (or more) using the same - VerifyingKey object. - - Note: You should call this method only once, this method generates a - new precomputation table every time it's called. - - :param bool lazy: whether to calculate the precomputation table now - (if set to False) or if it should be delayed to the time of first - use (when set to True) - """ - if isinstance(self.curve.curve, CurveEdTw): - pt = self.pubkey.point - self.pubkey.point = ellipticcurve.PointEdwards( - pt.curve(), - pt.x(), - pt.y(), - 1, - pt.x() * pt.y(), - self.curve.order, - generator=True, - ) - else: - self.pubkey.point = ellipticcurve.PointJacobi.from_affine( - self.pubkey.point, True - ) - # as precomputation in now delayed to the time of first use of the - # point and we were asked specifically to precompute now, make - # sure the precomputation is performed now to preserve the behaviour - if not lazy: - self.pubkey.point * 2 - - @classmethod - def from_string( - cls, - string, - curve=NIST192p, - hashfunc=sha1, - validate_point=True, - valid_encodings=None, - ): - """ - Initialise the object from byte encoding of public key. - - The method does accept and automatically detect the type of point - encoding used. It supports the :term:`raw encoding`, - :term:`uncompressed`, :term:`compressed`, and :term:`hybrid` encodings. - It also works with the native encoding of Ed25519 and Ed448 public - keys (technically those are compressed, but encoded differently than - in other signature systems). - - Note, while the method is named "from_string" it's a misnomer from - Python 2 days when there were no binary strings. In Python 3 the - input needs to be a bytes-like object. 
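# --- editor's note, not part of the deleted file ---
# Usage sketch for VerifyingKey.precompute() above: a one-time cost that pays
# off when the same public key verifies many signatures.
from ecdsa import SigningKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)
vk = sk.get_verifying_key()
vk.precompute()                      # build the multiplication tables once
sig = sk.sign(b"payload")
assert vk.verify(sig, b"payload")    # subsequent verifies run ~2x faster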
- - :param string: single point encoding of the public key - :type string: :term:`bytes-like object` - :param curve: the curve on which the public key is expected to lay - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface as - hashlib.sha1. Ignored for EdDSA. - :type hashfunc: callable - :param validate_point: whether to verify that the point lays on the - provided curve or not, defaults to True. Ignored for EdDSA. - :type validate_point: bool - :param valid_encodings: list of acceptable point encoding formats, - supported ones are: :term:`uncompressed`, :term:`compressed`, - :term:`hybrid`, and :term:`raw encoding` (specified with ``raw`` - name). All formats by default (specified with ``None``). - Ignored for EdDSA. - :type valid_encodings: :term:`set-like object` - - :raises MalformedPointError: if the public point does not lay on the - curve or the encoding is invalid - - :return: Initialised VerifyingKey object - :rtype: VerifyingKey - """ - if isinstance(curve.curve, CurveEdTw): - self = cls(_error__please_use_generate=True) - self.curve = curve - self.default_hashfunc = None # ignored for EdDSA - try: - self.pubkey = eddsa.PublicKey(curve.generator, string) - except ValueError: - raise MalformedPointError("Malformed point for the curve") - return self - - point = PointJacobi.from_bytes( - curve.curve, - string, - validate_encoding=validate_point, - valid_encodings=valid_encodings, - ) - return cls.from_public_point(point, curve, hashfunc, validate_point) - - @classmethod - def from_pem( - cls, - string, - hashfunc=sha1, - valid_encodings=None, - valid_curve_encodings=None, - ): - """ - Initialise from public key stored in :term:`PEM` format. - - The PEM header of the key should be ``BEGIN PUBLIC KEY``. - - See the :func:`~VerifyingKey.from_der()` method for details of the - format supported. - - Note: only a single PEM object decoding is supported in provided - string. - - :param string: text with PEM-encoded public ECDSA key - :type string: str - :param valid_encodings: list of allowed point encodings. - By default :term:`uncompressed`, :term:`compressed`, and - :term:`hybrid`. To read malformed files, include - :term:`raw encoding` with ``raw`` in the list. - :type valid_encodings: :term:`set-like object` - :param valid_curve_encodings: list of allowed encoding formats - for curve parameters. By default (``None``) all are supported: - ``named_curve`` and ``explicit``. - :type valid_curve_encodings: :term:`set-like object` - - - :return: Initialised VerifyingKey object - :rtype: VerifyingKey - """ - return cls.from_der( - der.unpem(string), - hashfunc=hashfunc, - valid_encodings=valid_encodings, - valid_curve_encodings=valid_curve_encodings, - ) - - @classmethod - def from_der( - cls, - string, - hashfunc=sha1, - valid_encodings=None, - valid_curve_encodings=None, - ): - """ - Initialise the key stored in :term:`DER` format. - - The expected format of the key is the SubjectPublicKeyInfo structure - from RFC5912 (for RSA keys, it's known as the PKCS#1 format):: - - SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE { - algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}}, - subjectPublicKey BIT STRING - } - - Note: only public EC keys are supported by this method. The - SubjectPublicKeyInfo.algorithm.algorithm field must specify - id-ecPublicKey (see RFC3279). 
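A PEM round trip matching the description above; a hedged sketch assuming the ecdsa package is installed (`to_pem` is documented further down in this file and returns bytes, which `from_pem` accepts):

    from ecdsa import SigningKey, VerifyingKey, NIST256p

    vk = SigningKey.generate(curve=NIST256p).get_verifying_key()
    pem = vk.to_pem()  # begins with b"-----BEGIN PUBLIC KEY-----"
    assert VerifyingKey.from_pem(pem) == vk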
- - Only the named curve encoding is supported, thus the - SubjectPublicKeyInfo.algorithm.parameters field needs to be an - object identifier. A sequence in that field indicates an explicit - parameter curve encoding, this format is not supported. A NULL object - in that field indicates an "implicitlyCA" encoding, where the curve - parameters come from CA certificate, those, again, are not supported. - - :param string: binary string with the DER encoding of public ECDSA key - :type string: bytes-like object - :param valid_encodings: list of allowed point encodings. - By default :term:`uncompressed`, :term:`compressed`, and - :term:`hybrid`. To read malformed files, include - :term:`raw encoding` with ``raw`` in the list. - :type valid_encodings: :term:`set-like object` - :param valid_curve_encodings: list of allowed encoding formats - for curve parameters. By default (``None``) all are supported: - ``named_curve`` and ``explicit``. - :type valid_curve_encodings: :term:`set-like object` - - :return: Initialised VerifyingKey object - :rtype: VerifyingKey - """ - if valid_encodings is None: - valid_encodings = set(["uncompressed", "compressed", "hybrid"]) - string = normalise_bytes(string) - # [[oid_ecPublicKey,oid_curve], point_str_bitstring] - s1, empty = der.remove_sequence(string) - if empty != b"": - raise der.UnexpectedDER( - "trailing junk after DER pubkey: %s" % binascii.hexlify(empty) - ) - s2, point_str_bitstring = der.remove_sequence(s1) - # s2 = oid_ecPublicKey,oid_curve - oid_pk, rest = der.remove_object(s2) - if oid_pk in (Ed25519.oid, Ed448.oid): - if oid_pk == Ed25519.oid: - curve = Ed25519 - else: - assert oid_pk == Ed448.oid - curve = Ed448 - point_str, empty = der.remove_bitstring(point_str_bitstring, 0) - if empty: - raise der.UnexpectedDER("trailing junk after public key") - return cls.from_string(point_str, curve, None) - if not oid_pk == oid_ecPublicKey: - raise der.UnexpectedDER( - "Unexpected object identifier in DER " - "encoding: {0!r}".format(oid_pk) - ) - curve = Curve.from_der(rest, valid_curve_encodings) - point_str, empty = der.remove_bitstring(point_str_bitstring, 0) - if empty != b"": - raise der.UnexpectedDER( - "trailing junk after pubkey pointstring: %s" - % binascii.hexlify(empty) - ) - # raw encoding of point is invalid in DER files - if len(point_str) == curve.verifying_key_length: - raise der.UnexpectedDER("Malformed encoding of public point") - return cls.from_string( - point_str, - curve, - hashfunc=hashfunc, - valid_encodings=valid_encodings, - ) - - @classmethod - def from_public_key_recovery( - cls, - signature, - data, - curve, - hashfunc=sha1, - sigdecode=sigdecode_string, - allow_truncate=True, - ): - """ - Return keys that can be used as verifiers of the provided signature. - - Tries to recover the public key that can be used to verify the - signature, usually returns two keys like that. 
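Public-key recovery as described above, sketched under the assumption that the ecdsa package is installed; the true verifying key should be among the returned candidates:

    from ecdsa import SigningKey, VerifyingKey, NIST256p

    sk = SigningKey.generate(curve=NIST256p)
    sig = sk.sign(b"payload")
    candidates = VerifyingKey.from_public_key_recovery(sig, b"payload", NIST256p)
    assert sk.get_verifying_key() in candidates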
- - :param signature: the byte string with the encoded signature - :type signature: bytes-like object - :param data: the data to be hashed for signature verification - :type data: bytes-like object - :param curve: the curve over which the signature was performed - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface as hashlib.sha1 - :type hashfunc: callable - :param sigdecode: Callable to define the way the signature needs to - be decoded to an object, needs to handle `signature` as the - first parameter, the curve order (an int) as the second and return - a tuple with two integers, "r" as the first one and "s" as the - second one. See :func:`ecdsa.util.sigdecode_string` and - :func:`ecdsa.util.sigdecode_der` for examples. - :param bool allow_truncate: if True, the provided hashfunc can generate - values larger than the bit size of the order of the curve, the - extra bits (at the end of the digest) will be truncated. - :type sigdecode: callable - - :return: Initialised VerifyingKey objects - :rtype: list of VerifyingKey - """ - if isinstance(curve.curve, CurveEdTw): - raise ValueError("Method unsupported for Edwards curves") - data = normalise_bytes(data) - digest = hashfunc(data).digest() - return cls.from_public_key_recovery_with_digest( - signature, - digest, - curve, - hashfunc=hashfunc, - sigdecode=sigdecode, - allow_truncate=allow_truncate, - ) - - @classmethod - def from_public_key_recovery_with_digest( - cls, - signature, - digest, - curve, - hashfunc=sha1, - sigdecode=sigdecode_string, - allow_truncate=False, - ): - """ - Return keys that can be used as verifiers of the provided signature. - - Tries to recover the public key that can be used to verify the - signature, usually returns two keys like that. - - :param signature: the byte string with the encoded signature - :type signature: bytes-like object - :param digest: the hash value of the message signed by the signature - :type digest: bytes-like object - :param curve: the curve over which the signature was performed - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface as hashlib.sha1 - :type hashfunc: callable - :param sigdecode: Callable to define the way the signature needs to - be decoded to an object, needs to handle `signature` as the - first parameter, the curve order (an int) as the second and return - a tuple with two integers, "r" as the first one and "s" as the - second one. See :func:`ecdsa.util.sigdecode_string` and - :func:`ecdsa.util.sigdecode_der` for examples. - :type sigdecode: callable - :param bool allow_truncate: if True, the provided hashfunc can generate - values larger than the bit size of the order of the curve (and - the length of provided `digest`), the extra bits (at the end of the - digest) will be truncated. 
- - :return: Initialised VerifyingKey object - :rtype: VerifyingKey - """ - if isinstance(curve.curve, CurveEdTw): - raise ValueError("Method unsupported for Edwards curves") - generator = curve.generator - r, s = sigdecode(signature, generator.order()) - sig = ecdsa.Signature(r, s) - - digest = normalise_bytes(digest) - digest_as_number = _truncate_and_convert_digest( - digest, curve, allow_truncate - ) - pks = sig.recover_public_keys(digest_as_number, generator) - - # Transform the ecdsa.Public_key objects into VerifyingKey objects - verifying_keys = [ - cls.from_public_point(pk.point, curve, hashfunc) for pk in pks - ] - return verifying_keys - - def to_string(self, encoding="raw"): - """ - Convert the public key to a byte string. - - The method by default uses the :term:`raw encoding` (specified - by `encoding="raw"`). It can also output keys in :term:`uncompressed`, - :term:`compressed` and :term:`hybrid` formats. - - Remember that the curve identification is not part of the encoding, - so to decode the point using :func:`~VerifyingKey.from_string`, the curve - needs to be specified. - - Note: while the method is called "to_string", it's a misnomer from - Python 2 days, when character strings and byte strings shared a type. - On Python 3 the returned type will be `bytes`. - - :return: :term:`raw encoding` of the public key (public point) on the - curve - :rtype: bytes - """ - assert encoding in ("raw", "uncompressed", "compressed", "hybrid") - return self.pubkey.point.to_bytes(encoding) - - def to_pem( - self, point_encoding="uncompressed", curve_parameters_encoding=None - ): - """ - Convert the public key to the :term:`PEM` format. - - The PEM header of the key will be ``BEGIN PUBLIC KEY``. - - The format of the key is described in the - :func:`~VerifyingKey.from_der()` method. - This method supports only "named curve" encoding of keys. - - :param str point_encoding: specification of the encoding format - of public keys. "uncompressed" is most portable, "compressed" is - smallest. "hybrid" is uncommon and unsupported by most - implementations; it is as big as "uncompressed". - :param str curve_parameters_encoding: the encoding for curve parameters - to use; by default tries to use ``named_curve`` encoding and, - if that is not possible, falls back to ``explicit`` encoding. - - :return: portable encoding of the public key - :rtype: bytes - - .. warning:: The PEM is encoded to US-ASCII; it needs to be - re-encoded if the system is incompatible (e.g. uses UTF-16) - """ - return der.topem( - self.to_der(point_encoding, curve_parameters_encoding), - "PUBLIC KEY", - ) - - def to_der( - self, point_encoding="uncompressed", curve_parameters_encoding=None - ): - """ - Convert the public key to the :term:`DER` format. - - The format of the key is described in the - :func:`~VerifyingKey.from_der()` method. - This method supports only "named curve" encoding of keys. - - :param str point_encoding: specification of the encoding format - of public keys. "uncompressed" is most portable, "compressed" is - smallest. "hybrid" is uncommon and unsupported by most - implementations; it is as big as "uncompressed". - :param str curve_parameters_encoding: the encoding for curve parameters - to use; by default tries to use ``named_curve`` encoding and, - if that is not possible, falls back to ``explicit`` encoding.
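The size trade-off between the point encodings mentioned above, shown for NIST256p (32-byte coordinates); a sketch assuming the ecdsa package is installed:

    from ecdsa import SigningKey, NIST256p

    vk = SigningKey.generate(curve=NIST256p).get_verifying_key()
    assert len(vk.to_string("raw")) == 64           # x and y coordinates only
    assert len(vk.to_string("uncompressed")) == 65  # 0x04 prefix + x + y
    assert len(vk.to_string("compressed")) == 33    # parity prefix + x only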
- - :return: DER encoding of the public key - :rtype: bytes - """ - if point_encoding == "raw": - raise ValueError("raw point_encoding not allowed in DER") - point_str = self.to_string(point_encoding) - if isinstance(self.curve.curve, CurveEdTw): - return der.encode_sequence( - der.encode_sequence(der.encode_oid(*self.curve.oid)), - der.encode_bitstring(bytes(point_str), 0), - ) - return der.encode_sequence( - der.encode_sequence( - encoded_oid_ecPublicKey, - self.curve.to_der(curve_parameters_encoding, point_encoding), - ), - # 0 is the number of unused bits in the - # bit string - der.encode_bitstring(point_str, 0), - ) - - def to_ssh(self): - """ - Convert the public key to the SSH format. - - :return: SSH encoding of the public key - :rtype: bytes - """ - return ssh.serialize_public( - self.curve.name, - self.to_string(), - ) - - def verify( - self, - signature, - data, - hashfunc=None, - sigdecode=sigdecode_string, - allow_truncate=True, - ): - """ - Verify a signature made over provided data. - - Will hash `data` to verify the signature. - - By default expects signature in :term:`raw encoding`. Can also be used - to verify signatures in ASN.1 DER encoding by using - :func:`ecdsa.util.sigdecode_der` - as the `sigdecode` parameter. - - :param signature: encoding of the signature - :type signature: sigdecode method dependent - :param data: data signed by the `signature`, will be hashed using - `hashfunc`, if specified, or default hash function - :type data: :term:`bytes-like object` - :param hashfunc: The default hash function that will be used for - verification, needs to implement the same interface as hashlib.sha1 - :type hashfunc: callable - :param sigdecode: Callable to define the way the signature needs to - be decoded to an object, needs to handle `signature` as the - first parameter, the curve order (an int) as the second and return - a tuple with two integers, "r" as the first one and "s" as the - second one. See :func:`ecdsa.util.sigdecode_string` and - :func:`ecdsa.util.sigdecode_der` for examples. - :type sigdecode: callable - :param bool allow_truncate: if True, the provided digest can have - bigger bit-size than the order of the curve, the extra bits (at - the end of the digest) will be truncated. Use it when verifying - SHA-384 output using NIST256p or in similar situations. Defaults to - True. - - :raises BadSignatureError: if the signature is invalid or malformed - - :return: True if the verification was successful - :rtype: bool - """ - # signature doesn't have to be a bytes-like-object so don't normalise - # it, the decoders will do that - data = normalise_bytes(data) - if isinstance(self.curve.curve, CurveEdTw): - signature = normalise_bytes(signature) - try: - return self.pubkey.verify(data, signature) - except (ValueError, MalformedPointError) as e: - raise BadSignatureError("Signature verification failed", e) - - hashfunc = hashfunc or self.default_hashfunc - digest = hashfunc(data).digest() - return self.verify_digest(signature, digest, sigdecode, allow_truncate) - - def verify_digest( - self, - signature, - digest, - sigdecode=sigdecode_string, - allow_truncate=False, - ): - """ - Verify a signature made over provided hash value. - - By default expects signature in :term:`raw encoding`. Can also be used - to verify signatures in ASN.1 DER encoding by using - :func:`ecdsa.util.sigdecode_der` - as the `sigdecode` parameter. 
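Verifying a DER-encoded signature with `sigdecode_der`, as the docstring above suggests; a sketch assuming the ecdsa package is installed:

    from hashlib import sha256
    from ecdsa import SigningKey, NIST256p
    from ecdsa.util import sigencode_der, sigdecode_der

    sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
    sig = sk.sign(b"data", sigencode=sigencode_der)  # ASN.1 DER signature
    assert sk.get_verifying_key().verify(sig, b"data", sigdecode=sigdecode_der)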
- - :param signature: encoding of the signature - :type signature: sigdecode method dependent - :param digest: raw hash value that the signature authenticates. - :type digest: :term:`bytes-like object` - :param sigdecode: Callable to define the way the signature needs to - be decoded to an object, needs to handle `signature` as the - first parameter, the curve order (an int) as the second and return - a tuple with two integers, "r" as the first one and "s" as the - second one. See :func:`ecdsa.util.sigdecode_string` and - :func:`ecdsa.util.sigdecode_der` for examples. - :type sigdecode: callable - :param bool allow_truncate: if True, the provided digest can have - bigger bit-size than the order of the curve, the extra bits (at - the end of the digest) will be truncated. Use it when verifying - SHA-384 output using NIST256p or in similar situations. - - :raises BadSignatureError: if the signature is invalid or malformed - :raises BadDigestError: if the provided digest is too big for the curve - associated with this VerifyingKey and allow_truncate was not set - - :return: True if the verification was successful - :rtype: bool - """ - # signature doesn't have to be a bytes-like-object so don't normalise - # it, the decoders will do that - digest = normalise_bytes(digest) - number = _truncate_and_convert_digest( - digest, - self.curve, - allow_truncate, - ) - - try: - r, s = sigdecode(signature, self.pubkey.order) - except (der.UnexpectedDER, MalformedSignature) as e: - raise BadSignatureError("Malformed formatting of signature", e) - sig = ecdsa.Signature(r, s) - if self.pubkey.verifies(number, sig): - return True - raise BadSignatureError("Signature verification failed") - - -class SigningKey(object): - """ - Class for handling keys that can create signatures (private keys). - - :ivar `~ecdsa.curves.Curve` curve: The Curve over which all the - cryptographic operations will take place - :ivar default_hashfunc: the function that will be used for hashing the - data. 
Should implement the same API as :py:class:`hashlib.sha1` - :ivar int baselen: the length of a :term:`raw encoding` of private key - :ivar `~ecdsa.keys.VerifyingKey` verifying_key: the public key - associated with this private key - :ivar `~ecdsa.ecdsa.Private_key` privkey: the actual private key - """ - - def __init__(self, _error__please_use_generate=None): - """Unsupported, please use one of the classmethods to initialise.""" - if not _error__please_use_generate: - raise TypeError("Please use SigningKey.generate() to construct me") - self.curve = None - self.default_hashfunc = None - self.baselen = None - self.verifying_key = None - self.privkey = None - - def __eq__(self, other): - """Return True if the points are identical, False otherwise.""" - if isinstance(other, SigningKey): - return ( - self.curve == other.curve - and self.verifying_key == other.verifying_key - and self.privkey == other.privkey - ) - return NotImplemented - - def __ne__(self, other): - """Return False if the points are identical, True otherwise.""" - return not self == other - - @classmethod - def _twisted_edwards_keygen(cls, curve, entropy): - """Generate a private key on a Twisted Edwards curve.""" - if not entropy: - entropy = os.urandom - random = entropy(curve.baselen) - private_key = eddsa.PrivateKey(curve.generator, random) - public_key = private_key.public_key() - - verifying_key = VerifyingKey.from_string( - public_key.public_key(), curve - ) - - self = cls(_error__please_use_generate=True) - self.curve = curve - self.default_hashfunc = None - self.baselen = curve.baselen - self.privkey = private_key - self.verifying_key = verifying_key - return self - - @classmethod - def _weierstrass_keygen(cls, curve, entropy, hashfunc): - """Generate a private key on a Weierstrass curve.""" - secexp = randrange(curve.order, entropy) - return cls.from_secret_exponent(secexp, curve, hashfunc) - - @classmethod - def generate(cls, curve=NIST192p, entropy=None, hashfunc=sha1): - """ - Generate a random private key. - - :param curve: The curve on which the point needs to reside, defaults - to NIST192p - :type curve: ~ecdsa.curves.Curve - :param entropy: Source of randomness for generating the private keys, - should provide cryptographically secure random numbers if the keys - need to be secure. Uses os.urandom() by default. - :type entropy: callable - :param hashfunc: The default hash function that will be used for - signing, needs to implement the same interface - as hashlib.sha1 - :type hashfunc: callable - - :return: Initialised SigningKey object - :rtype: SigningKey - """ - if isinstance(curve.curve, CurveEdTw): - return cls._twisted_edwards_keygen(curve, entropy) - return cls._weierstrass_keygen(curve, entropy, hashfunc) - - @classmethod - def from_secret_exponent(cls, secexp, curve=NIST192p, hashfunc=sha1): - """ - Create a private key from a random integer. - - Note: it's a low level method, it's recommended to use the - :func:`~SigningKey.generate` method to create private keys. - - :param int secexp: secret multiplier (the actual private key in ECDSA). - Needs to be an integer between 1 and the curve order. 
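Using `from_secret_exponent` as described above, assuming the ecdsa package is installed; the fixed exponent here is for demonstration only and must never be used for a real key:

    from ecdsa import SigningKey, NIST256p

    sk = SigningKey.from_secret_exponent(0xDEADBEEF, curve=NIST256p)
    assert sk.privkey.secret_multiplier == 0xDEADBEEF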
- :param curve: The curve on which the point needs to reside - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - signing, needs to implement the same interface - as hashlib.sha1 - :type hashfunc: callable - - :raises MalformedPointError: when the provided secexp is too large - or too small for the curve selected - :raises RuntimeError: if the generation of public key from private - key failed - - :return: Initialised SigningKey object - :rtype: SigningKey - """ - if isinstance(curve.curve, CurveEdTw): - raise ValueError( - "Edwards keys don't support setting the secret scalar " - "(exponent) directly" - ) - self = cls(_error__please_use_generate=True) - self.curve = curve - self.default_hashfunc = hashfunc - self.baselen = curve.baselen - n = curve.order - if not 1 <= secexp < n: - raise MalformedPointError( - "Invalid value for secexp, expected integer " - "between 1 and {0}".format(n) - ) - pubkey_point = curve.generator * secexp - if hasattr(pubkey_point, "scale"): - pubkey_point = pubkey_point.scale() - self.verifying_key = VerifyingKey.from_public_point( - pubkey_point, curve, hashfunc, False - ) - pubkey = self.verifying_key.pubkey - self.privkey = ecdsa.Private_key(pubkey, secexp) - self.privkey.order = n - return self - - @classmethod - def from_string(cls, string, curve=NIST192p, hashfunc=sha1): - """ - Decode the private key from :term:`raw encoding`. - - Note: the name of this method is a misnomer coming from days of - Python 2, when binary strings and character strings shared a type. - In Python 3, the expected type is `bytes`. - - :param string: the raw encoding of the private key - :type string: :term:`bytes-like object` - :param curve: The curve on which the point needs to reside - :type curve: ~ecdsa.curves.Curve - :param hashfunc: The default hash function that will be used for - signing, needs to implement the same interface - as hashlib.sha1 - :type hashfunc: callable - - :raises MalformedPointError: if the length of encoding doesn't match - the provided curve or the encoded values is too large - :raises RuntimeError: if the generation of public key from private - key failed - - :return: Initialised SigningKey object - :rtype: SigningKey - """ - string = normalise_bytes(string) - - if len(string) != curve.baselen: - raise MalformedPointError( - "Invalid length of private key, received {0}, " - "expected {1}".format(len(string), curve.baselen) - ) - if isinstance(curve.curve, CurveEdTw): - self = cls(_error__please_use_generate=True) - self.curve = curve - self.default_hashfunc = None # Ignored for EdDSA - self.baselen = curve.baselen - self.privkey = eddsa.PrivateKey(curve.generator, string) - self.verifying_key = VerifyingKey.from_string( - self.privkey.public_key().public_key(), curve - ) - return self - secexp = string_to_number(string) - return cls.from_secret_exponent(secexp, curve, hashfunc) - - @classmethod - def from_pem(cls, string, hashfunc=sha1, valid_curve_encodings=None): - """ - Initialise from key stored in :term:`PEM` format. - - The PEM formats supported are the un-encrypted RFC5915 - (the ssleay format) supported by OpenSSL, and the more common - un-encrypted RFC5958 (the PKCS #8 format). - - The legacy format files have the header with the string - ``BEGIN EC PRIVATE KEY``. - PKCS#8 files have the header ``BEGIN PRIVATE KEY``. - Encrypted files (ones that include the string - ``Proc-Type: 4,ENCRYPTED`` - right after the PEM header) are not supported. 
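The two PEM headers described above can be checked directly; a sketch assuming the ecdsa package is installed:

    from ecdsa import SigningKey, NIST256p

    sk = SigningKey.generate(curve=NIST256p)
    assert b"BEGIN EC PRIVATE KEY" in sk.to_pem()             # RFC5915/ssleay
    assert b"BEGIN PRIVATE KEY" in sk.to_pem(format="pkcs8")  # RFC5958/PKCS#8
    pkcs8 = sk.to_pem(format="pkcs8")
    assert SigningKey.from_pem(pkcs8).to_string() == sk.to_string()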
- - See :func:`~SigningKey.from_der` for ASN.1 syntax of the objects in - these files. - - :param string: text with PEM-encoded private ECDSA key - :type string: str - :param valid_curve_encodings: list of allowed encoding formats - for curve parameters. By default (``None``) all are supported: - ``named_curve`` and ``explicit``. - :type valid_curve_encodings: :term:`set-like object` - - - :raises MalformedPointError: if the length of encoding doesn't match - the provided curve or the encoded value is too large - :raises RuntimeError: if the generation of public key from private - key failed - :raises UnexpectedDER: if the encoding of the PEM file is incorrect - - :return: Initialised SigningKey object - :rtype: SigningKey - """ - if not PY2 and isinstance(string, str): # pragma: no branch - string = string.encode() - - # The privkey PEM may have multiple sections; commonly it also has - # "EC PARAMETERS", while we need just "EC PRIVATE KEY". PKCS#8 should not - # have the "EC PARAMETERS" section; it's just "PRIVATE KEY". - private_key_index = string.find(b"-----BEGIN EC PRIVATE KEY-----") - if private_key_index == -1: - private_key_index = string.index(b"-----BEGIN PRIVATE KEY-----") - - return cls.from_der( - der.unpem(string[private_key_index:]), - hashfunc, - valid_curve_encodings, - ) - - @classmethod - def from_der(cls, string, hashfunc=sha1, valid_curve_encodings=None): - """ - Initialise from key stored in :term:`DER` format. - - The DER formats supported are the un-encrypted RFC5915 - (the ssleay format) supported by OpenSSL, and the more common - un-encrypted RFC5958 (the PKCS #8 format). - - Both formats contain an ASN.1 object following the syntax specified - in RFC5915:: - - ECPrivateKey ::= SEQUENCE { - version INTEGER { ecPrivkeyVer1(1) } (ecPrivkeyVer1), - privateKey OCTET STRING, - parameters [0] ECParameters {{ NamedCurve }} OPTIONAL, - publicKey [1] BIT STRING OPTIONAL - } - - The `publicKey` field is ignored completely (errors, if any, in it will - be undetected). - - Two formats are supported for the `parameters` field: the named - curve and the explicit encoding of curve parameters. - In the legacy ssleay format, this implementation requires the optional - `parameters` field to get the curve name. In PKCS #8 format, the curve - is part of the PrivateKeyAlgorithmIdentifier. - - The PKCS #8 format includes an ECPrivateKey object as the `privateKey` - field within a larger structure:: - - OneAsymmetricKey ::= SEQUENCE { - version Version, - privateKeyAlgorithm PrivateKeyAlgorithmIdentifier, - privateKey PrivateKey, - attributes [0] Attributes OPTIONAL, - ..., - [[2: publicKey [1] PublicKey OPTIONAL ]], - ... - } - - The `attributes` and `publicKey` fields are completely ignored; errors - in them will not be detected. - - :param string: binary string with DER-encoded private ECDSA key - :type string: :term:`bytes-like object` - :param valid_curve_encodings: list of allowed encoding formats - for curve parameters. By default (``None``) all are supported: - ``named_curve`` and ``explicit``. - Ignored for EdDSA.
- :type valid_curve_encodings: :term:`set-like object` - - :raises MalformedPointError: if the length of encoding doesn't match - the provided curve or the encoded value is too large - :raises RuntimeError: if the generation of public key from private - key failed - :raises UnexpectedDER: if the encoding of the DER file is incorrect - - :return: Initialised SigningKey object - :rtype: SigningKey - """ - s = normalise_bytes(string) - curve = None - - s, empty = der.remove_sequence(s) - if empty != b"": - raise der.UnexpectedDER( - "trailing junk after DER privkey: %s" % binascii.hexlify(empty) - ) - - version, s = der.remove_integer(s) - - # At this point, PKCS #8 has a sequence containing the algorithm - # identifier and the curve identifier. The ssleay format instead has - # an octet string containing the key data, so this is how we can - # distinguish the two formats. - if der.is_sequence(s): - if version not in (0, 1): - raise der.UnexpectedDER( - "expected version '0' or '1' at start of privkey, got %d" - % version - ) - - sequence, s = der.remove_sequence(s) - algorithm_oid, algorithm_identifier = der.remove_object(sequence) - - if algorithm_oid in (Ed25519.oid, Ed448.oid): - if algorithm_identifier: - raise der.UnexpectedDER( - "Non-NULL parameters for an EdDSA key" - ) - key_str_der, s = der.remove_octet_string(s) - - # As RFC5958 describes, there may be optional Attributes - # and PublicKey fields. Don't raise an error if something - # follows the PrivateKey. - - # TODO parse attributes or validate publickey - # if s: - # raise der.UnexpectedDER( - # "trailing junk inside the privateKey" - # ) - key_str, s = der.remove_octet_string(key_str_der) - if s: - raise der.UnexpectedDER( - "trailing junk after the encoded private key" - ) - - if algorithm_oid == Ed25519.oid: - curve = Ed25519 - else: - assert algorithm_oid == Ed448.oid - curve = Ed448 - - return cls.from_string(key_str, curve, None) - - if algorithm_oid not in (oid_ecPublicKey, oid_ecDH, oid_ecMQV): - raise der.UnexpectedDER( - "unexpected algorithm identifier '%s'" % (algorithm_oid,) - ) - - curve = Curve.from_der(algorithm_identifier, valid_curve_encodings) - - # Up next is an octet string containing an ECPrivateKey. Ignore - # the optional "attributes" and "publicKey" fields that come after. - s, _ = der.remove_octet_string(s) - - # Unpack the ECPrivateKey to get to the key data octet string, - # and rejoin the ssleay parsing path. - s, empty = der.remove_sequence(s) - if empty != b"": - raise der.UnexpectedDER( - "trailing junk after DER privkey: %s" - % binascii.hexlify(empty) - ) - - version, s = der.remove_integer(s) - - # The version of the ECPrivateKey must be 1.
- if version != 1: - raise der.UnexpectedDER( - "expected version '1' at start of DER privkey, got %d" - % version - ) - - privkey_str, s = der.remove_octet_string(s) - - if not curve: - tag, curve_oid_str, s = der.remove_constructed(s) - if tag != 0: - raise der.UnexpectedDER( - "expected tag 0 in DER privkey, got %d" % tag - ) - curve = Curve.from_der(curve_oid_str, valid_curve_encodings) - - # we don't actually care about the following fields - # - # tag, pubkey_bitstring, s = der.remove_constructed(s) - # if tag != 1: - # raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d" - # % tag) - # pubkey_str = der.remove_bitstring(pubkey_bitstring, 0) - # if empty != "": - # raise der.UnexpectedDER("trailing junk after DER privkey " - # "pubkeystr: %s" - # % binascii.hexlify(empty)) - - # our from_string method likes fixed-length privkey strings - if len(privkey_str) < curve.baselen: - privkey_str = ( - b"\x00" * (curve.baselen - len(privkey_str)) + privkey_str - ) - return cls.from_string(privkey_str, curve, hashfunc) - - def to_string(self): - """ - Convert the private key to :term:`raw encoding`. - - Note: while the method is named "to_string", its name comes from - Python 2 days, when binary and character strings used the same type. - The type used in Python 3 is `bytes`. - - :return: raw encoding of private key - :rtype: bytes - """ - if isinstance(self.curve.curve, CurveEdTw): - return bytes(self.privkey.private_key) - secexp = self.privkey.secret_multiplier - s = number_to_string(secexp, self.privkey.order) - return s - - def to_pem( - self, - point_encoding="uncompressed", - format="ssleay", - curve_parameters_encoding=None, - ): - """ - Convert the private key to the :term:`PEM` format. - - See :func:`~SigningKey.from_pem` method for format description. - - Only the named curve format is supported. - The public key will be included in generated string. - - The PEM header will specify ``BEGIN EC PRIVATE KEY`` or - ``BEGIN PRIVATE KEY``, depending on the desired format. - - :param str point_encoding: format to use for encoding public point - :param str format: either ``ssleay`` (default) or ``pkcs8`` - :param str curve_parameters_encoding: format of encoded curve - parameters, default depends on the curve, if the curve has - an associated OID, ``named_curve`` format will be used, - if no OID is associated with the curve, the fallback of - ``explicit`` parameters will be used. - - :return: PEM encoded private key - :rtype: bytes - - .. warning:: The PEM is encoded to US-ASCII, it needs to be - re-encoded if the system is incompatible (e.g. uses UTF-16) - """ - # TODO: "BEGIN ECPARAMETERS" - assert format in ("ssleay", "pkcs8") - header = "EC PRIVATE KEY" if format == "ssleay" else "PRIVATE KEY" - return der.topem( - self.to_der(point_encoding, format, curve_parameters_encoding), - header, - ) - - def _encode_eddsa(self): - """Create a PKCS#8 encoding of EdDSA keys.""" - ec_private_key = der.encode_octet_string(self.to_string()) - return der.encode_sequence( - der.encode_integer(0), - der.encode_sequence(der.encode_oid(*self.curve.oid)), - der.encode_octet_string(ec_private_key), - ) - - def to_der( - self, - point_encoding="uncompressed", - format="ssleay", - curve_parameters_encoding=None, - ): - """ - Convert the private key to the :term:`DER` format. - - See :func:`~SigningKey.from_der` method for format specification. - - Only the named curve format is supported. - The public key will be included in the generated string. 
- - :param str point_encoding: format to use for encoding public point - Ignored for EdDSA - :param str format: either ``ssleay`` (default) or ``pkcs8``. - EdDSA keys require ``pkcs8``. - :param str curve_parameters_encoding: format of encoded curve - parameters, default depends on the curve, if the curve has - an associated OID, ``named_curve`` format will be used, - if no OID is associated with the curve, the fallback of - ``explicit`` parameters will be used. - Ignored for EdDSA. - - :return: DER encoded private key - :rtype: bytes - """ - # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1), - # cont[1],bitstring]) - if point_encoding == "raw": - raise ValueError("raw encoding not allowed in DER") - assert format in ("ssleay", "pkcs8") - if isinstance(self.curve.curve, CurveEdTw): - if format != "pkcs8": - raise ValueError("Only PKCS#8 format supported for EdDSA keys") - return self._encode_eddsa() - encoded_vk = self.get_verifying_key().to_string(point_encoding) - priv_key_elems = [ - der.encode_integer(1), - der.encode_octet_string(self.to_string()), - ] - if format == "ssleay": - priv_key_elems.append( - der.encode_constructed( - 0, self.curve.to_der(curve_parameters_encoding) - ) - ) - # the 0 in encode_bitstring specifies the number of unused bits - # in the `encoded_vk` string - priv_key_elems.append( - der.encode_constructed(1, der.encode_bitstring(encoded_vk, 0)) - ) - ec_private_key = der.encode_sequence(*priv_key_elems) - - if format == "ssleay": - return ec_private_key - else: - return der.encode_sequence( - # version = 1 means the public key is not present in the - # top-level structure. - der.encode_integer(1), - der.encode_sequence( - der.encode_oid(*oid_ecPublicKey), - self.curve.to_der(curve_parameters_encoding), - ), - der.encode_octet_string(ec_private_key), - ) - - def to_ssh(self): - """ - Convert the private key to the SSH format. - - :return: SSH encoded private key - :rtype: bytes - """ - return ssh.serialize_private( - self.curve.name, - self.verifying_key.to_string(), - self.to_string(), - ) - - def get_verifying_key(self): - """ - Return the VerifyingKey associated with this private key. - - Equivalent to reading the `verifying_key` field of an instance. - - :return: a public key that can be used to verify the signatures made - with this SigningKey - :rtype: VerifyingKey - """ - return self.verifying_key - - def sign_deterministic( - self, - data, - hashfunc=None, - sigencode=sigencode_string, - extra_entropy=b"", - ): - """ - Create signature over data. - - For Weierstrass curves it uses the deterministic RFC6979 algorithm. - For Edwards curves it uses the standard EdDSA algorithm. - - For ECDSA the data will be hashed using the `hashfunc` function before - signing. - For EdDSA the data will be hashed with the hash associated with the - curve (SHA-512 for Ed25519 and SHAKE-256 for Ed448). - - This is the recommended method for performing signatures when hashing - of data is necessary. - - :param data: data to be hashed and computed signature over - :type data: :term:`bytes-like object` - :param hashfunc: hash function to use for computing the signature, - if unspecified, the default hash function selected during - object initialisation will be used (see - `VerifyingKey.default_hashfunc`). The object needs to implement - the same interface as hashlib.sha1. - Ignored with EdDSA. - :type hashfunc: callable - :param sigencode: function used to encode the signature. 
- The function needs to accept three parameters: the two integers - that are the signature and the order of the curve over which the - signature was computed. It needs to return an encoded signature. - See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` - as examples of such functions. - Ignored with EdDSA. - :type sigencode: callable - :param extra_entropy: additional data that will be fed into the random - number generator used in the RFC6979 process. Entirely optional. - Ignored with EdDSA. - :type extra_entropy: :term:`bytes-like object` - - :return: encoded signature over `data` - :rtype: bytes or sigencode function dependent type - """ - hashfunc = hashfunc or self.default_hashfunc - data = normalise_bytes(data) - - if isinstance(self.curve.curve, CurveEdTw): - return self.privkey.sign(data) - - extra_entropy = normalise_bytes(extra_entropy) - digest = hashfunc(data).digest() - - return self.sign_digest_deterministic( - digest, - hashfunc=hashfunc, - sigencode=sigencode, - extra_entropy=extra_entropy, - allow_truncate=True, - ) - - def sign_digest_deterministic( - self, - digest, - hashfunc=None, - sigencode=sigencode_string, - extra_entropy=b"", - allow_truncate=False, - ): - """ - Create signature for digest using the deterministic RFC6979 algorithm. - - `digest` should be the output of cryptographically secure hash function - like SHA256 or SHA-3-256. - - This is the recommended method for performing signatures when no - hashing of data is necessary. - - :param digest: hash of data that will be signed - :type digest: :term:`bytes-like object` - :param hashfunc: hash function to use for computing the random "k" - value from RFC6979 process, - if unspecified, the default hash function selected during - object initialisation will be used (see - :attr:`.VerifyingKey.default_hashfunc`). The object needs to - implement - the same interface as :func:`~hashlib.sha1` from :py:mod:`hashlib`. - :type hashfunc: callable - :param sigencode: function used to encode the signature. - The function needs to accept three parameters: the two integers - that are the signature and the order of the curve over which the - signature was computed. It needs to return an encoded signature. - See :func:`~ecdsa.util.sigencode_string` and - :func:`~ecdsa.util.sigencode_der` - as examples of such functions. - :type sigencode: callable - :param extra_entropy: additional data that will be fed into the random - number generator used in the RFC6979 process. Entirely optional. - :type extra_entropy: :term:`bytes-like object` - :param bool allow_truncate: if True, the provided digest can have - bigger bit-size than the order of the curve, the extra bits (at - the end of the digest) will be truncated. Use it when signing - SHA-384 output using NIST256p or in similar situations. 
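A determinism check for the RFC 6979 behaviour described above; a sketch assuming the ecdsa package is installed:

    from hashlib import sha256
    from ecdsa import SigningKey, NIST256p

    sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
    # same key, same data, same hash: the signatures are identical
    assert sk.sign_deterministic(b"msg") == sk.sign_deterministic(b"msg")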
- - :return: encoded signature for the `digest` hash - :rtype: bytes or sigencode function dependent type - """ - if isinstance(self.curve.curve, CurveEdTw): - raise ValueError("Method unsupported for Edwards curves") - secexp = self.privkey.secret_multiplier - hashfunc = hashfunc or self.default_hashfunc - digest = normalise_bytes(digest) - extra_entropy = normalise_bytes(extra_entropy) - - def simple_r_s(r, s, order): - return r, s, order - - retry_gen = 0 - while True: - k = rfc6979.generate_k( - self.curve.generator.order(), - secexp, - hashfunc, - digest, - retry_gen=retry_gen, - extra_entropy=extra_entropy, - ) - try: - r, s, order = self.sign_digest( - digest, - sigencode=simple_r_s, - k=k, - allow_truncate=allow_truncate, - ) - break - except RSZeroError: - retry_gen += 1 - - return sigencode(r, s, order) - - def sign( - self, - data, - entropy=None, - hashfunc=None, - sigencode=sigencode_string, - k=None, - allow_truncate=True, - ): - """ - Create signature over data. - - Uses the probabilistic ECDSA algorithm for Weierstrass curves - (NIST256p, etc.) and the deterministic EdDSA algorithm for the - Edwards curves (Ed25519, Ed448). - - This method uses the standard ECDSA algorithm that requires a - cryptographically secure random number generator. - - It's recommended to use the :func:`~SigningKey.sign_deterministic` - method instead of this one. - - :param data: data that will be hashed for signing - :type data: :term:`bytes-like object` - :param callable entropy: randomness source, :func:`os.urandom` by - default. Ignored with EdDSA. - :param hashfunc: hash function to use for hashing the provided - ``data``. - If unspecified the default hash function selected during - object initialisation will be used (see - :attr:`.VerifyingKey.default_hashfunc`). - Should behave like :func:`~hashlib.sha1` from :py:mod:`hashlib`. - The output length of the - hash (in bytes) must not be longer than the length of the curve - order (rounded up to the nearest byte), so using SHA256 with - NIST256p is ok, but SHA256 with NIST192p is not. (In the 2**-96ish - unlikely event of a hash output larger than the curve order, the - hash will effectively be wrapped mod n). - If you want to explicitly allow use of large hashes with small - curves set the ``allow_truncate`` to ``True``. - Use ``hashfunc=hashlib.sha1`` to match openssl's - ``-ecdsa-with-SHA1`` mode, - or ``hashfunc=hashlib.sha256`` for openssl-1.0.0's - ``-ecdsa-with-SHA256``. - Ignored for EdDSA - :type hashfunc: callable - :param sigencode: function used to encode the signature. - The function needs to accept three parameters: the two integers - that are the signature and the order of the curve over which the - signature was computed. It needs to return an encoded signature. - See :func:`~ecdsa.util.sigencode_string` and - :func:`~ecdsa.util.sigencode_der` - as examples of such functions. - Ignored for EdDSA - :type sigencode: callable - :param int k: a pre-selected nonce for calculating the signature. - In typical use cases, it should be set to None (the default) to - allow its generation from an entropy source. - Ignored for EdDSA. - :param bool allow_truncate: if ``True``, the provided digest can have - bigger bit-size than the order of the curve, the extra bits (at - the end of the digest) will be truncated. Use it when signing - SHA-384 output using NIST256p or in similar situations. True by - default. - Ignored for EdDSA. 
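The `allow_truncate` behaviour described above, exercised with a hash wider than the curve order; a sketch assuming the ecdsa package is installed:

    from hashlib import sha384
    from ecdsa import SigningKey, NIST256p

    sk = SigningKey.generate(curve=NIST256p, hashfunc=sha384)
    sig = sk.sign(b"message")  # allow_truncate=True is the default here
    assert sk.get_verifying_key().verify(sig, b"message")  # and here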
- - :raises RSZeroError: in the unlikely event when *r* parameter or - *s* parameter of the created signature is equal 0, as that would - leak the key. Caller should try a better entropy source, retry with - different ``k``, or use the - :func:`~SigningKey.sign_deterministic` in such case. - - :return: encoded signature of the hash of `data` - :rtype: bytes or sigencode function dependent type - """ - hashfunc = hashfunc or self.default_hashfunc - data = normalise_bytes(data) - if isinstance(self.curve.curve, CurveEdTw): - return self.sign_deterministic(data) - h = hashfunc(data).digest() - return self.sign_digest(h, entropy, sigencode, k, allow_truncate) - - def sign_digest( - self, - digest, - entropy=None, - sigencode=sigencode_string, - k=None, - allow_truncate=False, - ): - """ - Create signature over digest using the probabilistic ECDSA algorithm. - - This method uses the standard ECDSA algorithm that requires a - cryptographically secure random number generator. - - This method does not hash the input. - - It's recommended to use the - :func:`~SigningKey.sign_digest_deterministic` method - instead of this one. - - :param digest: hash value that will be signed - :type digest: :term:`bytes-like object` - :param callable entropy: randomness source, os.urandom by default - :param sigencode: function used to encode the signature. - The function needs to accept three parameters: the two integers - that are the signature and the order of the curve over which the - signature was computed. It needs to return an encoded signature. - See `ecdsa.util.sigencode_string` and `ecdsa.util.sigencode_der` - as examples of such functions. - :type sigencode: callable - :param int k: a pre-selected nonce for calculating the signature. - In typical use cases, it should be set to None (the default) to - allow its generation from an entropy source. - :param bool allow_truncate: if True, the provided digest can have - bigger bit-size than the order of the curve, the extra bits (at - the end of the digest) will be truncated. Use it when signing - SHA-384 output using NIST256p or in similar situations. - - :raises RSZeroError: in the unlikely event when "r" parameter or - "s" parameter of the created signature is equal 0, as that would - leak the key. Caller should try a better entropy source, retry with - different 'k', or use the - :func:`~SigningKey.sign_digest_deterministic` in such case. - - :return: encoded signature for the `digest` hash - :rtype: bytes or sigencode function dependent type - """ - if isinstance(self.curve.curve, CurveEdTw): - raise ValueError("Method unsupported for Edwards curves") - digest = normalise_bytes(digest) - number = _truncate_and_convert_digest( - digest, - self.curve, - allow_truncate, - ) - r, s = self.sign_number(number, entropy, k) - return sigencode(r, s, self.privkey.order) - - def sign_number(self, number, entropy=None, k=None): - """ - Sign an integer directly. - - Note, this is a low level method, usually you will want to use - :func:`~SigningKey.sign_deterministic` or - :func:`~SigningKey.sign_digest_deterministic`. - - :param int number: number to sign using the probabilistic ECDSA - algorithm. - :param callable entropy: entropy source, os.urandom by default - :param int k: pre-selected nonce for signature operation. If unset - it will be selected at random using the entropy source. - - :raises RSZeroError: in the unlikely event when "r" parameter or - "s" parameter of the created signature is equal 0, as that would - leak the key. 
Caller should try a better entropy source, retry with - different 'k', or use the - :func:`~SigningKey.sign_digest_deterministic` in such case. - - :return: the "r" and "s" parameters of the signature - :rtype: tuple of ints - """ - if isinstance(self.curve.curve, CurveEdTw): - raise ValueError("Method unsupported for Edwards curves") - order = self.privkey.order - - if k is not None: - _k = k - else: - _k = randrange(order, entropy) - - assert 1 <= _k < order - sig = self.privkey.sign(number, _k) - return sig.r, sig.s diff --git a/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py b/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py deleted file mode 100644 index fe974f8e..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/numbertheory.py +++ /dev/null @@ -1,835 +0,0 @@ -#! /usr/bin/env python -# -# Provide some simple capabilities from number theory. -# -# Version of 2008.11.14. -# -# Written in 2005 and 2006 by Peter Pearson and placed in the public domain. -# Revision history: -# 2008.11.14: Use pow(base, exponent, modulus) for modular_exp. -# Make gcd and lcm accept arbitrarily many arguments. - -from __future__ import division - -import sys -from six import integer_types, PY2 -from six.moves import reduce - -try: - xrange -except NameError: - xrange = range -try: - from gmpy2 import powmod, mpz - - GMPY2 = True - GMPY = False -except ImportError: # pragma: no branch - GMPY2 = False - try: - from gmpy import mpz - - GMPY = True - except ImportError: - GMPY = False - - -if GMPY2 or GMPY: # pragma: no branch - integer_types = tuple(integer_types + (type(mpz(1)),)) - - -import math -import warnings -import random -from .util import bit_length - - -class Error(Exception): - """Base class for exceptions in this module.""" - - pass - - -class JacobiError(Error): - pass - - -class SquareRootError(Error): - pass - - -class NegativeExponentError(Error): - pass - - -def modular_exp(base, exponent, modulus): # pragma: no cover - """Raise base to exponent, reducing by modulus""" - # deprecated in 0.14 - warnings.warn( - "Function is unused in library code. If you use this code, " - "change to pow() builtin.", - DeprecationWarning, - ) - if exponent < 0: - raise NegativeExponentError( - "Negative exponents (%d) not allowed" % exponent - ) - return pow(base, exponent, modulus) - - -def polynomial_reduce_mod(poly, polymod, p): - """Reduce poly by polymod, integer arithmetic modulo p. - - Polynomials are represented as lists of coefficients - of increasing powers of x.""" - - # This module has been tested only by extensive use - # in calculating modular square roots. - - # Just to make this easy, require a monic polynomial: - assert polymod[-1] == 1 - - assert len(polymod) > 1 - - while len(poly) >= len(polymod): - if poly[-1] != 0: - for i in xrange(2, len(polymod) + 1): - poly[-i] = (poly[-i] - poly[-1] * polymod[-i]) % p - poly = poly[0:-1] - - return poly - - -def polynomial_multiply_mod(m1, m2, polymod, p): - """Polynomial multiplication modulo a polynomial over ints mod p. - - Polynomials are represented as lists of coefficients - of increasing powers of x.""" - - # This is just a seat-of-the-pants implementation. - - # This module has been tested only by extensive use - # in calculating modular square roots. 
- - # Initialize the product to zero: - - prod = (len(m1) + len(m2) - 1) * [0] - - # Add together all the cross-terms: - - for i in xrange(len(m1)): - for j in xrange(len(m2)): - prod[i + j] = (prod[i + j] + m1[i] * m2[j]) % p - - return polynomial_reduce_mod(prod, polymod, p) - - -def polynomial_exp_mod(base, exponent, polymod, p): - """Polynomial exponentiation modulo a polynomial over ints mod p. - - Polynomials are represented as lists of coefficients - of increasing powers of x.""" - - # Based on the Handbook of Applied Cryptography, algorithm 2.227. - - # This module has been tested only by extensive use - # in calculating modular square roots. - - assert exponent < p - - if exponent == 0: - return [1] - - G = base - k = exponent - if k % 2 == 1: - s = G - else: - s = [1] - - while k > 1: - k = k // 2 - G = polynomial_multiply_mod(G, G, polymod, p) - if k % 2 == 1: - s = polynomial_multiply_mod(G, s, polymod, p) - - return s - - -def jacobi(a, n): - """Jacobi symbol""" - - # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149. - - # This function has been tested by comparison with a small - # table printed in HAC, and by extensive use in calculating - # modular square roots. - - if not n >= 3: - raise JacobiError("n must be larger than 2") - if not n % 2 == 1: - raise JacobiError("n must be odd") - a = a % n - if a == 0: - return 0 - if a == 1: - return 1 - a1, e = a, 0 - while a1 % 2 == 0: - a1, e = a1 // 2, e + 1 - if e % 2 == 0 or n % 8 == 1 or n % 8 == 7: - s = 1 - else: - s = -1 - if a1 == 1: - return s - if n % 4 == 3 and a1 % 4 == 3: - s = -s - return s * jacobi(n % a1, a1) - - -def square_root_mod_prime(a, p): - """Modular square root of a, mod p, p prime.""" - - # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39. - - # This module has been tested for all values in [0,p-1] for - # every prime p from 3 to 1229. 
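A quick check of the contract described in the comments above (assuming `ecdsa.numbertheory` is importable; the concrete values are illustrative):

    from ecdsa.numbertheory import square_root_mod_prime, SquareRootError

    assert square_root_mod_prime(4, 23) in (2, 21)  # 2*2 = 21*21 = 4 (mod 23)
    try:
        square_root_mod_prime(5, 23)  # 5 is not a quadratic residue mod 23
    except SquareRootError:
        pass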
- - assert 0 <= a < p - assert 1 < p - - if a == 0: - return 0 - if p == 2: - return a - - jac = jacobi(a, p) - if jac == -1: - raise SquareRootError("%d has no square root modulo %d" % (a, p)) - - if p % 4 == 3: - return pow(a, (p + 1) // 4, p) - - if p % 8 == 5: - d = pow(a, (p - 1) // 4, p) - if d == 1: - return pow(a, (p + 3) // 8, p) - assert d == p - 1 - return (2 * a * pow(4 * a, (p - 5) // 8, p)) % p - - if PY2: - # xrange on python2 can take integers representable as C long only - range_top = min(0x7FFFFFFF, p) - else: - range_top = p - for b in xrange(2, range_top): # pragma: no branch - if jacobi(b * b - 4 * a, p) == -1: - f = (a, -b, 1) - ff = polynomial_exp_mod((0, 1), (p + 1) // 2, f, p) - if ff[1]: - raise SquareRootError("p is not prime") - return ff[0] - # just an assertion - raise RuntimeError("No b found.") # pragma: no cover - - -# because all the inverse_mod code is arch/environment specific, and coveralls -# expects it to execute equal number of times, we need to waive it by -# adding the "no branch" pragma to all branches -if GMPY2: # pragma: no branch - - def inverse_mod(a, m): - """Inverse of a mod m.""" - if a == 0: # pragma: no branch - return 0 - return powmod(a, -1, m) - -elif GMPY: # pragma: no branch - - def inverse_mod(a, m): - """Inverse of a mod m.""" - # while libgmp does support inverses modulo, it is accessible - # only using the native `pow()` function, and `pow()` in gmpy sanity - # checks the parameters before passing them on to underlying - # implementation - if a == 0: # pragma: no branch - return 0 - a = mpz(a) - m = mpz(m) - - lm, hm = mpz(1), mpz(0) - low, high = a % m, m - while low > 1: # pragma: no branch - r = high // low - lm, low, hm, high = hm - lm * r, high - low * r, lm, low - - return lm % m - -elif sys.version_info >= (3, 8): # pragma: no branch - - def inverse_mod(a, m): - """Inverse of a mod m.""" - if a == 0: # pragma: no branch - return 0 - return pow(a, -1, m) - -else: # pragma: no branch - - def inverse_mod(a, m): - """Inverse of a mod m.""" - - if a == 0: # pragma: no branch - return 0 - - lm, hm = 1, 0 - low, high = a % m, m - while low > 1: # pragma: no branch - r = high // low - lm, low, hm, high = hm - lm * r, high - low * r, lm, low - - return lm % m - - -try: - gcd2 = math.gcd -except AttributeError: - - def gcd2(a, b): - """Greatest common divisor using Euclid's algorithm.""" - while a: - a, b = b % a, a - return b - - -def gcd(*a): - """Greatest common divisor. - - Usage: gcd([ 2, 4, 6 ]) - or: gcd(2, 4, 6) - """ - - if len(a) > 1: - return reduce(gcd2, a) - if hasattr(a[0], "__iter__"): - return reduce(gcd2, a[0]) - return a[0] - - -def lcm2(a, b): - """Least common multiple of two integers.""" - - return (a * b) // gcd(a, b) - - -def lcm(*a): - """Least common multiple. 
- - Usage: lcm([ 3, 4, 5 ]) - or: lcm(3, 4, 5) - """ - - if len(a) > 1: - return reduce(lcm2, a) - if hasattr(a[0], "__iter__"): - return reduce(lcm2, a[0]) - return a[0] - - -def factorization(n): - """Decompose n into a list of (prime,exponent) pairs.""" - - assert isinstance(n, integer_types) - - if n < 2: - return [] - - result = [] - - # Test the small primes: - - for d in smallprimes: - if d > n: - break - q, r = divmod(n, d) - if r == 0: - count = 1 - while d <= n: # pragma: no branch - n = q - q, r = divmod(n, d) - if r != 0: - break - count = count + 1 - result.append((d, count)) - - # If n is still greater than the last of our small primes, - # it may require further work: - - if n > smallprimes[-1]: - if is_prime(n): # If what's left is prime, it's easy: - result.append((n, 1)) - else: # Ugh. Search stupidly for a divisor: - d = smallprimes[-1] - while 1: - d = d + 2 # Try the next divisor. - q, r = divmod(n, d) - if q < d: # n < d*d means we're done, n = 1 or prime. - break - if r == 0: # d divides n. How many times? - count = 1 - n = q - # As long as d might still divide n, - while d <= n: # pragma: no branch - q, r = divmod(n, d) # see if it does. - if r != 0: - break - n = q # It does. Reduce n, increase count. - count = count + 1 - result.append((d, count)) - if n > 1: - result.append((n, 1)) - - return result - - -def phi(n): # pragma: no cover - """Return the Euler totient function of n.""" - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - assert isinstance(n, integer_types) - - if n < 3: - return 1 - - result = 1 - ff = factorization(n) - for f in ff: - e = f[1] - if e > 1: - result = result * f[0] ** (e - 1) * (f[0] - 1) - else: - result = result * (f[0] - 1) - return result - - -def carmichael(n): # pragma: no cover - """Return Carmichael function of n. - - Carmichael(n) is the smallest integer x such that - m**x = 1 mod n for all m relatively prime to n. - """ - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - return carmichael_of_factorized(factorization(n)) - - -def carmichael_of_factorized(f_list): # pragma: no cover - """Return the Carmichael function of a number that is - represented as a list of (prime,exponent) pairs. - """ - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - if len(f_list) < 1: - return 1 - - result = carmichael_of_ppower(f_list[0]) - for i in xrange(1, len(f_list)): - result = lcm(result, carmichael_of_ppower(f_list[i])) - - return result - - -def carmichael_of_ppower(pp): # pragma: no cover - """Carmichael function of the given power of the given prime.""" - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - p, a = pp - if p == 2 and a > 2: - return 2 ** (a - 2) - else: - return (p - 1) * p ** (a - 1) - - -def order_mod(x, m): # pragma: no cover - """Return the order of x in the multiplicative group mod m.""" - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. 
If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - # Warning: this implementation is not very clever, and will - # take a long time if m is very large. - - if m <= 1: - return 0 - - assert gcd(x, m) == 1 - - z = x - result = 1 - while z != 1: - z = (z * x) % m - result = result + 1 - return result - - -def largest_factor_relatively_prime(a, b): # pragma: no cover - """Return the largest factor of a relatively prime to b.""" - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - while 1: - d = gcd(a, b) - if d <= 1: - break - b = d - while 1: - q, r = divmod(a, d) - if r > 0: - break - a = q - return a - - -def kinda_order_mod(x, m): # pragma: no cover - """Return the order of x in the multiplicative group mod m', - where m' is the largest factor of m relatively prime to x. - """ - # deprecated in 0.14 - warnings.warn( - "Function is unused by library code. If you use this code, " - "please open an issue in " - "https://github.com/tlsfuzzer/python-ecdsa", - DeprecationWarning, - ) - - return order_mod(x, largest_factor_relatively_prime(m, x)) - - -def is_prime(n): - """Return True if x is prime, False otherwise. - - We use the Miller-Rabin test, as given in Menezes et al. p. 138. - This test is not exact: there are composite values n for which - it returns True. - - In testing the odd numbers from 10000001 to 19999999, - about 66 composites got past the first test, - 5 got past the second test, and none got past the third. - Since factors of 2, 3, 5, 7, and 11 were detected during - preliminary screening, the number of numbers tested by - Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7) - = 4.57 million. - """ - - # (This is used to study the risk of false positives:) - global miller_rabin_test_count - - miller_rabin_test_count = 0 - - if n <= smallprimes[-1]: - if n in smallprimes: - return True - else: - return False - # 2310 = 2 * 3 * 5 * 7 * 11 - if gcd(n, 2310) != 1: - return False - - # Choose a number of iterations sufficient to reduce the - # probability of accepting a composite below 2**-80 - # (from Menezes et al. 
Table 4.4): - - t = 40 - n_bits = 1 + bit_length(n) - assert 11 <= n_bits <= 16384 - for k, tt in ( - (100, 27), - (150, 18), - (200, 15), - (250, 12), - (300, 9), - (350, 8), - (400, 7), - (450, 6), - (550, 5), - (650, 4), - (850, 3), - (1300, 2), - ): - if n_bits < k: - break - t = tt - - # Run the test t times: - - s = 0 - r = n - 1 - while (r % 2) == 0: - s = s + 1 - r = r // 2 - for i in xrange(t): - a = random.choice(smallprimes) - y = pow(a, r, n) - if y != 1 and y != n - 1: - j = 1 - while j <= s - 1 and y != n - 1: - y = pow(y, 2, n) - if y == 1: - miller_rabin_test_count = i + 1 - return False - j = j + 1 - if y != n - 1: - miller_rabin_test_count = i + 1 - return False - return True - - -def next_prime(starting_value): - """Return the smallest prime larger than the starting value.""" - - if starting_value < 2: - return 2 - result = (starting_value + 1) | 1 - while not is_prime(result): - result = result + 2 - return result - - -smallprimes = [ - 2, - 3, - 5, - 7, - 11, - 13, - 17, - 19, - 23, - 29, - 31, - 37, - 41, - 43, - 47, - 53, - 59, - 61, - 67, - 71, - 73, - 79, - 83, - 89, - 97, - 101, - 103, - 107, - 109, - 113, - 127, - 131, - 137, - 139, - 149, - 151, - 157, - 163, - 167, - 173, - 179, - 181, - 191, - 193, - 197, - 199, - 211, - 223, - 227, - 229, - 233, - 239, - 241, - 251, - 257, - 263, - 269, - 271, - 277, - 281, - 283, - 293, - 307, - 311, - 313, - 317, - 331, - 337, - 347, - 349, - 353, - 359, - 367, - 373, - 379, - 383, - 389, - 397, - 401, - 409, - 419, - 421, - 431, - 433, - 439, - 443, - 449, - 457, - 461, - 463, - 467, - 479, - 487, - 491, - 499, - 503, - 509, - 521, - 523, - 541, - 547, - 557, - 563, - 569, - 571, - 577, - 587, - 593, - 599, - 601, - 607, - 613, - 617, - 619, - 631, - 641, - 643, - 647, - 653, - 659, - 661, - 673, - 677, - 683, - 691, - 701, - 709, - 719, - 727, - 733, - 739, - 743, - 751, - 757, - 761, - 769, - 773, - 787, - 797, - 809, - 811, - 821, - 823, - 827, - 829, - 839, - 853, - 857, - 859, - 863, - 877, - 881, - 883, - 887, - 907, - 911, - 919, - 929, - 937, - 941, - 947, - 953, - 967, - 971, - 977, - 983, - 991, - 997, - 1009, - 1013, - 1019, - 1021, - 1031, - 1033, - 1039, - 1049, - 1051, - 1061, - 1063, - 1069, - 1087, - 1091, - 1093, - 1097, - 1103, - 1109, - 1117, - 1123, - 1129, - 1151, - 1153, - 1163, - 1171, - 1181, - 1187, - 1193, - 1201, - 1213, - 1217, - 1223, - 1229, -] - -miller_rabin_test_count = 0 diff --git a/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py b/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py deleted file mode 100644 index 0728b5a4..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/rfc6979.py +++ /dev/null @@ -1,113 +0,0 @@ -""" -RFC 6979: - Deterministic Usage of the Digital Signature Algorithm (DSA) and - Elliptic Curve Digital Signature Algorithm (ECDSA) - - http://tools.ietf.org/html/rfc6979 - -Many thanks to Coda Hale for his implementation in Go language: - https://github.com/codahale/rfc6979 -""" - -import hmac -from binascii import hexlify -from .util import number_to_string, number_to_string_crop, bit_length -from ._compat import hmac_compat - - -# bit_length was defined in this module previously so keep it for backwards -# compatibility, will need to deprecate and remove it later -__all__ = ["bit_length", "bits2int", "bits2octets", "generate_k"] - - -def bits2int(data, qlen): - x = int(hexlify(data), 16) - l = len(data) * 8 - - if l > qlen: - return x >> (l - qlen) - return x - - -def bits2octets(data, order): - z1 = bits2int(data, bit_length(order)) - z2 = z1 - order - - if z2 < 
0: - z2 = z1 - - return number_to_string_crop(z2, order) - - -# https://tools.ietf.org/html/rfc6979#section-3.2 -def generate_k(order, secexp, hash_func, data, retry_gen=0, extra_entropy=b""): - """ - Generate the ``k`` value - the nonce for DSA. - - :param int order: order of the DSA generator used in the signature - :param int secexp: secure exponent (private key) in numeric form - :param hash_func: reference to the same hash function used for generating - hash, like :py:class:`hashlib.sha1` - :param bytes data: hash in binary form of the signing data - :param int retry_gen: how many good 'k' values to skip before returning - :param bytes extra_entropy: additional added data in binary form as per - section-3.6 of rfc6979 - :rtype: int - """ - - qlen = bit_length(order) - holen = hash_func().digest_size - rolen = (qlen + 7) // 8 - bx = ( - hmac_compat(number_to_string(secexp, order)), - hmac_compat(bits2octets(data, order)), - hmac_compat(extra_entropy), - ) - - # Step B - v = b"\x01" * holen - - # Step C - k = b"\x00" * holen - - # Step D - - k = hmac.new(k, digestmod=hash_func) - k.update(v + b"\x00") - for i in bx: - k.update(i) - k = k.digest() - - # Step E - v = hmac.new(k, v, hash_func).digest() - - # Step F - k = hmac.new(k, digestmod=hash_func) - k.update(v + b"\x01") - for i in bx: - k.update(i) - k = k.digest() - - # Step G - v = hmac.new(k, v, hash_func).digest() - - # Step H - while True: - # Step H1 - t = b"" - - # Step H2 - while len(t) < rolen: - v = hmac.new(k, v, hash_func).digest() - t += v - - # Step H3 - secret = bits2int(t, qlen) - - if 1 <= secret < order: - if retry_gen <= 0: - return secret - retry_gen -= 1 - - k = hmac.new(k, v + b"\x00", hash_func).digest() - v = hmac.new(k, v, hash_func).digest() diff --git a/venv/lib/python3.12/site-packages/ecdsa/ssh.py b/venv/lib/python3.12/site-packages/ecdsa/ssh.py deleted file mode 100644 index 64e94030..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/ssh.py +++ /dev/null @@ -1,83 +0,0 @@ -import binascii -from . 
import der -from ._compat import compat26_str, int_to_bytes - -_SSH_ED25519 = b"ssh-ed25519" -_SK_MAGIC = b"openssh-key-v1\0" -_NONE = b"none" - - -def _get_key_type(name): - if name == "Ed25519": - return _SSH_ED25519 - else: - raise ValueError("Unsupported key type") - - -class _Serializer: - def __init__(self): - self.bytes = b"" - - def put_raw(self, val): - self.bytes += val - - def put_u32(self, val): - self.bytes += int_to_bytes(val, length=4, byteorder="big") - - def put_str(self, val): - self.put_u32(len(val)) - self.bytes += val - - def put_pad(self, blklen=8): - padlen = blklen - (len(self.bytes) % blklen) - self.put_raw(bytearray(range(1, 1 + padlen))) - - def encode(self): - return binascii.b2a_base64(compat26_str(self.bytes)) - - def tobytes(self): - return self.bytes - - def topem(self): - return der.topem(self.bytes, "OPENSSH PRIVATE KEY") - - -def serialize_public(name, pub): - serial = _Serializer() - ktype = _get_key_type(name) - serial.put_str(ktype) - serial.put_str(pub) - return b" ".join([ktype, serial.encode()]) - - -def serialize_private(name, pub, priv): - # encode public part - spub = _Serializer() - ktype = _get_key_type(name) - spub.put_str(ktype) - spub.put_str(pub) - - # encode private part - spriv = _Serializer() - checksum = 0 - spriv.put_u32(checksum) - spriv.put_u32(checksum) - spriv.put_raw(spub.tobytes()) - spriv.put_str(priv + pub) - comment = b"" - spriv.put_str(comment) - spriv.put_pad() - - # top-level structure - main = _Serializer() - main.put_raw(_SK_MAGIC) - ciphername = kdfname = _NONE - main.put_str(ciphername) - main.put_str(kdfname) - nokdf = 0 - main.put_u32(nokdf) - nkeys = 1 - main.put_u32(nkeys) - main.put_str(spub.tobytes()) - main.put_str(spriv.tobytes()) - return main.topem() diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_curves.py b/venv/lib/python3.12/site-packages/ecdsa/test_curves.py deleted file mode 100644 index 93b6c9bd..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_curves.py +++ /dev/null @@ -1,361 +0,0 @@ -try: - import unittest2 as unittest -except ImportError: - import unittest - -import base64 -import pytest -from .curves import ( - Curve, - NIST256p, - curves, - UnknownCurveError, - PRIME_FIELD_OID, - curve_by_name, -) -from .ellipticcurve import CurveFp, PointJacobi, CurveEdTw -from . 
import der -from .util import number_to_string - - -class TestParameterEncoding(unittest.TestCase): - @classmethod - def setUpClass(cls): - # minimal, but with cofactor (excludes seed when compared to - # OpenSSL output) - cls.base64_params = ( - "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////" - "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K" - "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd" - "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1" - "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=" - ) - - def test_from_pem(self): - pem_params = ( - "-----BEGIN EC PARAMETERS-----\n" - "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" - "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" - "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" - "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" - "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" - "-----END EC PARAMETERS-----\n" - ) - curve = Curve.from_pem(pem_params) - - self.assertIs(curve, NIST256p) - - def test_from_pem_with_explicit_when_explicit_disabled(self): - pem_params = ( - "-----BEGIN EC PARAMETERS-----\n" - "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" - "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" - "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" - "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" - "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" - "-----END EC PARAMETERS-----\n" - ) - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_pem(pem_params, ["named_curve"]) - - self.assertIn("explicit curve parameters not", str(e.exception)) - - def test_from_pem_with_named_curve_with_named_curve_disabled(self): - pem_params = ( - "-----BEGIN EC PARAMETERS-----\n" - "BggqhkjOPQMBBw==\n" - "-----END EC PARAMETERS-----\n" - ) - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_pem(pem_params, ["explicit"]) - - self.assertIn("named_curve curve parameters not", str(e.exception)) - - def test_from_pem_with_wrong_header(self): - pem_params = ( - "-----BEGIN PARAMETERS-----\n" - "MIHgAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP/////////\n" - "//////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12K\n" - "o6k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEQQRrF9Hy4SxCR/i85uVjpEDyd\n" - "wN9gS3rM6D0oTlF2JjClk/jQuL+Gn+bjufrSnwPnhYrzjNXazFezsu2QGg3v1H1\n" - "AiEA/////wAAAAD//////////7zm+q2nF56E87nKwvxjJVECAQE=\n" - "-----END PARAMETERS-----\n" - ) - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_pem(pem_params) - - self.assertIn("PARAMETERS PEM header", str(e.exception)) - - def test_to_pem(self): - pem_params = ( - b"-----BEGIN EC PARAMETERS-----\n" - b"BggqhkjOPQMBBw==\n" - b"-----END EC PARAMETERS-----\n" - ) - encoding = NIST256p.to_pem() - - self.assertEqual(pem_params, encoding) - - def test_compare_with_different_object(self): - self.assertNotEqual(NIST256p, 256) - - def test_named_curve_params_der(self): - encoded = NIST256p.to_der() - - # just the encoding of the NIST256p OID (prime256v1) - self.assertEqual(b"\x06\x08\x2a\x86\x48\xce\x3d\x03\x01\x07", encoded) - - def test_verify_that_default_is_named_curve_der(self): - encoded_default = NIST256p.to_der() - encoded_named = NIST256p.to_der("named_curve") - - self.assertEqual(encoded_default, encoded_named) - - def test_encoding_to_explicit_params(self): - encoded = NIST256p.to_der("explicit") - - 
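
# The round trip this test asserts can be reproduced as a standalone
# sketch, using only the Curve API exercised elsewhere in this file
# (curve choice is arbitrary):
from ecdsa.curves import Curve, NIST256p

explicit_der = NIST256p.to_der("explicit")  # full ECParameters SEQUENCE
assert Curve.from_der(explicit_der) is NIST256p  # decodes back to the named curve
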
self.assertEqual(encoded, bytes(base64.b64decode(self.base64_params))) - - def test_encoding_to_unsupported_type(self): - with self.assertRaises(ValueError) as e: - NIST256p.to_der("unsupported") - - self.assertIn("Only 'named_curve'", str(e.exception)) - - def test_encoding_to_explicit_compressed_params(self): - encoded = NIST256p.to_der("explicit", "compressed") - - compressed_base_point = ( - "MIHAAgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" - "/////zBEBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" - "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsEIQNrF9Hy4SxCR/i85uVjpEDydwN9" - "gS3rM6D0oTlF2JjClgIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8YyVR" - "AgEB" - ) - - self.assertEqual( - encoded, bytes(base64.b64decode(compressed_base_point)) - ) - - def test_decoding_explicit_from_openssl(self): - # generated with openssl 1.1.1k using - # openssl ecparam -name P-256 -param_enc explicit -out /tmp/file.pem - p256_explicit = ( - "MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP//////////" - "/////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6" - "k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+" - "kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK" - "fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz" - "ucrC/GMlUQIBAQ==" - ) - - decoded = Curve.from_der(bytes(base64.b64decode(p256_explicit))) - - self.assertEqual(NIST256p, decoded) - - def test_decoding_well_known_from_explicit_params(self): - curve = Curve.from_der(bytes(base64.b64decode(self.base64_params))) - - self.assertIs(curve, NIST256p) - - def test_decoding_with_incorrect_valid_encodings(self): - with self.assertRaises(ValueError) as e: - Curve.from_der(b"", ["explicitCA"]) - - self.assertIn("Only named_curve", str(e.exception)) - - def test_compare_curves_with_different_generators(self): - curve_fp = CurveFp(23, 1, 7) - base_a = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) - base_b = PointJacobi(curve_fp, 1, 20, 1, 9, generator=True) - - curve_a = Curve("unknown", curve_fp, base_a, None) - curve_b = Curve("unknown", curve_fp, base_b, None) - - self.assertNotEqual(curve_a, curve_b) - - def test_default_encode_for_custom_curve(self): - curve_fp = CurveFp(23, 1, 7) - base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) - - curve = Curve("unknown", curve_fp, base_point, None) - - encoded = curve.to_der() - - decoded = Curve.from_der(encoded) - - self.assertEqual(curve, decoded) - - expected = "MCECAQEwDAYHKoZIzj0BAQIBFzAGBAEBBAEHBAMEDQMCAQk=" - - self.assertEqual(encoded, bytes(base64.b64decode(expected))) - - def test_named_curve_encode_for_custom_curve(self): - curve_fp = CurveFp(23, 1, 7) - base_point = PointJacobi(curve_fp, 13, 3, 1, 9, generator=True) - - curve = Curve("unknown", curve_fp, base_point, None) - - with self.assertRaises(UnknownCurveError) as e: - curve.to_der("named_curve") - - self.assertIn("Can't encode curve", str(e.exception)) - - def test_try_decoding_binary_explicit(self): - sect113r1_explicit = ( - "MIGRAgEBMBwGByqGSM49AQIwEQIBcQYJKoZIzj0BAgMCAgEJMDkEDwAwiCUMpufH" - "/mSc6Fgg9wQPAOi+5NPiJgdEGIvg6ccjAxUAEOcjqxTWluZ2h1YVF1b+v4/LSakE" - "HwQAnXNhbzX0qxQH1zViwQ8ApSgwJ3lY7oTRMV7TGIYCDwEAAAAAAAAA2czsijnl" - "bwIBAg==" - ) - - with self.assertRaises(UnknownCurveError) as e: - Curve.from_der(base64.b64decode(sect113r1_explicit)) - - self.assertIn("Characteristic 2 curves unsupported", str(e.exception)) - - def test_decode_malformed_named_curve(self): - bad_der = der.encode_oid(*NIST256p.oid) + der.encode_integer(1) - - with 
self.assertRaises(der.UnexpectedDER) as e: - Curve.from_der(bad_der) - - self.assertIn("Unexpected data after OID", str(e.exception)) - - def test_decode_malformed_explicit_garbage_after_ECParam(self): - bad_der = bytes( - base64.b64decode(self.base64_params) - ) + der.encode_integer(1) - - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_der(bad_der) - - self.assertIn("Unexpected data after ECParameters", str(e.exception)) - - def test_decode_malformed_unknown_version_number(self): - bad_der = der.encode_sequence(der.encode_integer(2)) - - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_der(bad_der) - - self.assertIn("Unknown parameter encoding format", str(e.exception)) - - def test_decode_malformed_unknown_field_type(self): - curve_p = NIST256p.curve.p() - bad_der = der.encode_sequence( - der.encode_integer(1), - der.encode_sequence( - der.encode_oid(1, 2, 3), der.encode_integer(curve_p) - ), - der.encode_sequence( - der.encode_octet_string( - number_to_string(NIST256p.curve.a() % curve_p, curve_p) - ), - der.encode_octet_string( - number_to_string(NIST256p.curve.b(), curve_p) - ), - ), - der.encode_octet_string( - NIST256p.generator.to_bytes("uncompressed") - ), - der.encode_integer(NIST256p.generator.order()), - ) - - with self.assertRaises(UnknownCurveError) as e: - Curve.from_der(bad_der) - - self.assertIn("Unknown field type: (1, 2, 3)", str(e.exception)) - - def test_decode_malformed_garbage_after_prime(self): - curve_p = NIST256p.curve.p() - bad_der = der.encode_sequence( - der.encode_integer(1), - der.encode_sequence( - der.encode_oid(*PRIME_FIELD_OID), - der.encode_integer(curve_p), - der.encode_integer(1), - ), - der.encode_sequence( - der.encode_octet_string( - number_to_string(NIST256p.curve.a() % curve_p, curve_p) - ), - der.encode_octet_string( - number_to_string(NIST256p.curve.b(), curve_p) - ), - ), - der.encode_octet_string( - NIST256p.generator.to_bytes("uncompressed") - ), - der.encode_integer(NIST256p.generator.order()), - ) - - with self.assertRaises(der.UnexpectedDER) as e: - Curve.from_der(bad_der) - - self.assertIn("Prime-p element", str(e.exception)) - - -class TestCurveSearching(unittest.TestCase): - def test_correct_name(self): - c = curve_by_name("NIST256p") - self.assertIs(c, NIST256p) - - def test_openssl_name(self): - c = curve_by_name("prime256v1") - self.assertIs(c, NIST256p) - - def test_unknown_curve(self): - with self.assertRaises(UnknownCurveError) as e: - curve_by_name("foo bar") - - self.assertIn( - "name 'foo bar' unknown, only curves supported: " - "['NIST192p', 'NIST224p'", - str(e.exception), - ) - - def test_with_None_as_parameter(self): - with self.assertRaises(UnknownCurveError) as e: - curve_by_name(None) - - self.assertIn( - "name None unknown, only curves supported: " - "['NIST192p', 'NIST224p'", - str(e.exception), - ) - - -@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) -def test_curve_params_encode_decode_named(curve): - ret = Curve.from_der(curve.to_der("named_curve")) - - assert curve == ret - - -@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) -def test_curve_params_encode_decode_explicit(curve): - if isinstance(curve.curve, CurveEdTw): - with pytest.raises(UnknownCurveError): - curve.to_der("explicit") - else: - ret = Curve.from_der(curve.to_der("explicit")) - - assert curve == ret - - -@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) -def test_curve_params_encode_decode_default(curve): - ret = Curve.from_der(curve.to_der()) - - assert 
curve == ret - - -@pytest.mark.parametrize("curve", curves, ids=[i.name for i in curves]) -def test_curve_params_encode_decode_explicit_compressed(curve): - if isinstance(curve.curve, CurveEdTw): - with pytest.raises(UnknownCurveError): - curve.to_der("explicit", "compressed") - else: - ret = Curve.from_der(curve.to_der("explicit", "compressed")) - - assert curve == ret diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_der.py b/venv/lib/python3.12/site-packages/ecdsa/test_der.py deleted file mode 100644 index b0955431..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_der.py +++ /dev/null @@ -1,602 +0,0 @@ -# compatibility with Python 2.6, for that we need unittest2 package, -# which is not available on 3.3 or 3.4 -import warnings -from binascii import hexlify - -try: - import unittest2 as unittest -except ImportError: - import unittest -import sys -import hypothesis.strategies as st -from hypothesis import given, settings -import pytest -from ._compat import str_idx_as_int -from .curves import NIST256p, NIST224p -from .der import ( - remove_integer, - UnexpectedDER, - read_length, - encode_bitstring, - remove_bitstring, - remove_object, - encode_oid, - remove_constructed, - remove_implicit, - remove_octet_string, - remove_sequence, - encode_implicit, -) - - -class TestRemoveInteger(unittest.TestCase): - # DER requires the integers to be 0-padded only if they would be - # interpreted as negative, check if those errors are detected - def test_non_minimal_encoding(self): - with self.assertRaises(UnexpectedDER): - remove_integer(b"\x02\x02\x00\x01") - - def test_negative_with_high_bit_set(self): - with self.assertRaises(UnexpectedDER): - remove_integer(b"\x02\x01\x80") - - def test_minimal_with_high_bit_set(self): - val, rem = remove_integer(b"\x02\x02\x00\x80") - - self.assertEqual(val, 0x80) - self.assertEqual(rem, b"") - - def test_two_zero_bytes_with_high_bit_set(self): - with self.assertRaises(UnexpectedDER): - remove_integer(b"\x02\x03\x00\x00\xff") - - def test_zero_length_integer(self): - with self.assertRaises(UnexpectedDER): - remove_integer(b"\x02\x00") - - def test_empty_string(self): - with self.assertRaises(UnexpectedDER): - remove_integer(b"") - - def test_encoding_of_zero(self): - val, rem = remove_integer(b"\x02\x01\x00") - - self.assertEqual(val, 0) - self.assertEqual(rem, b"") - - def test_encoding_of_127(self): - val, rem = remove_integer(b"\x02\x01\x7f") - - self.assertEqual(val, 127) - self.assertEqual(rem, b"") - - def test_encoding_of_128(self): - val, rem = remove_integer(b"\x02\x02\x00\x80") - - self.assertEqual(val, 128) - self.assertEqual(rem, b"") - - def test_wrong_tag(self): - with self.assertRaises(UnexpectedDER) as e: - remove_integer(b"\x01\x02\x00\x80") - - self.assertIn("wanted type 'integer'", str(e.exception)) - - def test_wrong_length(self): - with self.assertRaises(UnexpectedDER) as e: - remove_integer(b"\x02\x03\x00\x80") - - self.assertIn("Length longer", str(e.exception)) - - -class TestReadLength(unittest.TestCase): - # DER requires the lengths between 0 and 127 to be encoded using the short - # form and lengths above that encoded with minimal number of bytes - # necessary - def test_zero_length(self): - self.assertEqual((0, 1), read_length(b"\x00")) - - def test_two_byte_zero_length(self): - with self.assertRaises(UnexpectedDER): - read_length(b"\x81\x00") - - def test_two_byte_small_length(self): - with self.assertRaises(UnexpectedDER): - read_length(b"\x81\x7f") - - def test_long_form_with_zero_length(self): - with 
self.assertRaises(UnexpectedDER): - read_length(b"\x80") - - def test_smallest_two_byte_length(self): - self.assertEqual((128, 2), read_length(b"\x81\x80")) - - def test_zero_padded_length(self): - with self.assertRaises(UnexpectedDER): - read_length(b"\x82\x00\x80") - - def test_two_three_byte_length(self): - self.assertEqual((256, 3), read_length(b"\x82\x01\x00")) - - def test_empty_string(self): - with self.assertRaises(UnexpectedDER): - read_length(b"") - - def test_length_overflow(self): - with self.assertRaises(UnexpectedDER): - read_length(b"\x83\x01\x00") - - -class TestEncodeBitstring(unittest.TestCase): - # DER requires BIT STRINGS to include a number of padding bits in the - # encoded byte string, that padding must be between 0 and 7 - - def test_old_call_convention(self): - """This is the old way to use the function.""" - warnings.simplefilter("always") - with pytest.warns(DeprecationWarning) as warns: - der = encode_bitstring(b"\x00\xff") - - self.assertEqual(len(warns), 1) - self.assertIn( - "unused= needs to be specified", warns[0].message.args[0] - ) - - self.assertEqual(der, b"\x03\x02\x00\xff") - - def test_new_call_convention(self): - """This is how it should be called now.""" - # make sure no warnings are raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - der = encode_bitstring(b"\xff", 0) - - self.assertEqual(der, b"\x03\x02\x00\xff") - - def test_implicit_unused_bits(self): - """ - Writing bit string with already included the number of unused bits. - """ - # make sure no warnings are raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - der = encode_bitstring(b"\x00\xff", None) - - self.assertEqual(der, b"\x03\x02\x00\xff") - - def test_explicit_unused_bits(self): - der = encode_bitstring(b"\xff\xf0", 4) - - self.assertEqual(der, b"\x03\x03\x04\xff\xf0") - - def test_empty_string(self): - self.assertEqual(encode_bitstring(b"", 0), b"\x03\x01\x00") - - def test_invalid_unused_count(self): - with self.assertRaises(ValueError): - encode_bitstring(b"\xff\x00", 8) - - def test_invalid_unused_with_empty_string(self): - with self.assertRaises(ValueError): - encode_bitstring(b"", 1) - - def test_non_zero_padding_bits(self): - with self.assertRaises(ValueError): - encode_bitstring(b"\xff", 2) - - -class TestRemoveBitstring(unittest.TestCase): - def test_old_call_convention(self): - """This is the old way to call the function.""" - warnings.simplefilter("always") - with pytest.warns(DeprecationWarning) as warns: - bits, rest = remove_bitstring(b"\x03\x02\x00\xff") - - self.assertEqual(len(warns), 1) - self.assertIn( - "expect_unused= needs to be specified", warns[0].message.args[0] - ) - - self.assertEqual(bits, b"\x00\xff") - self.assertEqual(rest, b"") - - def test_new_call_convention(self): - # make sure no warnings are raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - bits, rest = remove_bitstring(b"\x03\x02\x00\xff", 0) - - self.assertEqual(bits, b"\xff") - self.assertEqual(rest, b"") - - def test_implicit_unexpected_unused(self): - # make sure no warnings are raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - bits, rest = remove_bitstring(b"\x03\x02\x00\xff", None) - - self.assertEqual(bits, (b"\xff", 0)) - self.assertEqual(rest, b"") - - def test_with_padding(self): - ret, rest = remove_bitstring(b"\x03\x02\x04\xf0", None) - - self.assertEqual(ret, (b"\xf0", 4)) - self.assertEqual(rest, b"") - - def test_not_a_bitstring(self): - with self.assertRaises(UnexpectedDER): - 
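
# Sketch of the BIT STRING layout these encode/remove tests pin down
# (an illustrative helper, not the module's own implementation): tag 0x03,
# a length covering the payload plus one octet, then the count of unused
# padding bits (0..7, which must themselves be zero), then the data.
def _encode_bitstring_sketch(data, unused):
    """Hedged sketch: short-form lengths only."""
    assert 0 <= unused <= 7 and (data or unused == 0)
    if data:
        assert data[-1] & ((1 << unused) - 1) == 0  # padding bits are zero
    payload = bytes([unused]) + data
    assert len(payload) < 0x80  # keep to short-form DER lengths
    return b"\x03" + bytes([len(payload)]) + payload

# _encode_bitstring_sketch(b"\xff\xf0", 4) == b"\x03\x03\x04\xff\xf0"
# _encode_bitstring_sketch(b"", 0) == b"\x03\x01\x00"
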
remove_bitstring(b"\x02\x02\x00\xff", None) - - def test_empty_encoding(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03\x00", None) - - def test_empty_string(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"", None) - - def test_no_length(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03", None) - - def test_unexpected_number_of_unused_bits(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03\x02\x00\xff", 1) - - def test_invalid_encoding_of_unused_bits(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03\x03\x08\xff\x00", None) - - def test_invalid_encoding_of_empty_string(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03\x01\x01", None) - - def test_invalid_padding_bits(self): - with self.assertRaises(UnexpectedDER): - remove_bitstring(b"\x03\x02\x01\xff", None) - - -class TestStrIdxAsInt(unittest.TestCase): - def test_str(self): - self.assertEqual(115, str_idx_as_int("str", 0)) - - def test_bytes(self): - self.assertEqual(115, str_idx_as_int(b"str", 0)) - - def test_bytearray(self): - self.assertEqual(115, str_idx_as_int(bytearray(b"str"), 0)) - - -class TestEncodeOid(unittest.TestCase): - def test_pub_key_oid(self): - oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) - self.assertEqual(hexlify(oid_ecPublicKey), b"06072a8648ce3d0201") - - def test_nist224p_oid(self): - self.assertEqual(hexlify(NIST224p.encoded_oid), b"06052b81040021") - - def test_nist256p_oid(self): - self.assertEqual( - hexlify(NIST256p.encoded_oid), b"06082a8648ce3d030107" - ) - - def test_large_second_subid(self): - # from X.690, section 8.19.5 - oid = encode_oid(2, 999, 3) - self.assertEqual(oid, b"\x06\x03\x88\x37\x03") - - def test_with_two_subids(self): - oid = encode_oid(2, 999) - self.assertEqual(oid, b"\x06\x02\x88\x37") - - def test_zero_zero(self): - oid = encode_oid(0, 0) - self.assertEqual(oid, b"\x06\x01\x00") - - def test_with_wrong_types(self): - with self.assertRaises((TypeError, AssertionError)): - encode_oid(0, None) - - def test_with_small_first_large_second(self): - with self.assertRaises(AssertionError): - encode_oid(1, 40) - - def test_small_first_max_second(self): - oid = encode_oid(1, 39) - self.assertEqual(oid, b"\x06\x01\x4f") - - def test_with_invalid_first(self): - with self.assertRaises(AssertionError): - encode_oid(3, 39) - - -class TestRemoveObject(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.oid_ecPublicKey = encode_oid(1, 2, 840, 10045, 2, 1) - - def test_pub_key_oid(self): - oid, rest = remove_object(self.oid_ecPublicKey) - self.assertEqual(rest, b"") - self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) - - def test_with_extra_bytes(self): - oid, rest = remove_object(self.oid_ecPublicKey + b"more") - self.assertEqual(rest, b"more") - self.assertEqual(oid, (1, 2, 840, 10045, 2, 1)) - - def test_with_large_second_subid(self): - # from X.690, section 8.19.5 - oid, rest = remove_object(b"\x06\x03\x88\x37\x03") - self.assertEqual(rest, b"") - self.assertEqual(oid, (2, 999, 3)) - - def test_with_padded_first_subid(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x02\x80\x00") - - def test_with_padded_second_subid(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x04\x88\x37\x80\x01") - - def test_with_missing_last_byte_of_multi_byte(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x03\x88\x37\x83") - - def test_with_two_subids(self): - oid, rest = 
remove_object(b"\x06\x02\x88\x37") - self.assertEqual(rest, b"") - self.assertEqual(oid, (2, 999)) - - def test_zero_zero(self): - oid, rest = remove_object(b"\x06\x01\x00") - self.assertEqual(rest, b"") - self.assertEqual(oid, (0, 0)) - - def test_empty_string(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"") - - def test_missing_length(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06") - - def test_empty_oid(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x00") - - def test_empty_oid_overflow(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x01") - - def test_with_wrong_type(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x04\x02\x88\x37") - - def test_with_too_long_length(self): - with self.assertRaises(UnexpectedDER): - remove_object(b"\x06\x03\x88\x37") - - -class TestRemoveConstructed(unittest.TestCase): - def test_simple(self): - data = b"\xa1\x02\xff\xaa" - - tag, body, rest = remove_constructed(data) - - self.assertEqual(tag, 0x01) - self.assertEqual(body, b"\xff\xaa") - self.assertEqual(rest, b"") - - def test_with_malformed_tag(self): - data = b"\x01\x02\xff\xaa" - - with self.assertRaises(UnexpectedDER) as e: - remove_constructed(data) - - self.assertIn("constructed tag", str(e.exception)) - - -class TestRemoveImplicit(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.exp_tag = 6 - cls.exp_data = b"\x0a\x0b" - # data with application tag class - cls.data_application = b"\x46\x02\x0a\x0b" - # data with context-specific tag class - cls.data_context_specific = b"\x86\x02\x0a\x0b" - # data with private tag class - cls.data_private = b"\xc6\x02\x0a\x0b" - - def test_simple(self): - tag, body, rest = remove_implicit(self.data_context_specific) - - self.assertEqual(tag, self.exp_tag) - self.assertEqual(body, self.exp_data) - self.assertEqual(rest, b"") - - def test_wrong_expected_class(self): - with self.assertRaises(ValueError) as e: - remove_implicit(self.data_context_specific, "foobar") - - self.assertIn("invalid `exp_class` value", str(e.exception)) - - def test_with_wrong_class(self): - with self.assertRaises(UnexpectedDER) as e: - remove_implicit(self.data_application) - - self.assertIn( - "wanted class context-specific, got 0x46 tag", str(e.exception) - ) - - def test_with_application_class(self): - tag, body, rest = remove_implicit(self.data_application, "application") - - self.assertEqual(tag, self.exp_tag) - self.assertEqual(body, self.exp_data) - self.assertEqual(rest, b"") - - def test_with_private_class(self): - tag, body, rest = remove_implicit(self.data_private, "private") - - self.assertEqual(tag, self.exp_tag) - self.assertEqual(body, self.exp_data) - self.assertEqual(rest, b"") - - def test_with_data_following(self): - extra_data = b"\x00\x01" - - tag, body, rest = remove_implicit( - self.data_context_specific + extra_data - ) - - self.assertEqual(tag, self.exp_tag) - self.assertEqual(body, self.exp_data) - self.assertEqual(rest, extra_data) - - def test_with_constructed(self): - data = b"\xa6\x02\x0a\x0b" - - with self.assertRaises(UnexpectedDER) as e: - remove_implicit(data) - - self.assertIn("wanted type primitive, got 0xa6 tag", str(e.exception)) - - def test_encode_decode(self): - data = b"some longish string" - - tag, body, rest = remove_implicit( - encode_implicit(6, data, "application"), "application" - ) - - self.assertEqual(tag, 6) - self.assertEqual(body, data) - self.assertEqual(rest, b"") - - -class TestEncodeImplicit(unittest.TestCase): 
- @classmethod - def setUpClass(cls): - cls.data = b"\x0a\x0b" - # data with application tag class - cls.data_application = b"\x46\x02\x0a\x0b" - # data with context-specific tag class - cls.data_context_specific = b"\x86\x02\x0a\x0b" - # data with private tag class - cls.data_private = b"\xc6\x02\x0a\x0b" - - def test_encode_with_default_class(self): - ret = encode_implicit(6, self.data) - - self.assertEqual(ret, self.data_context_specific) - - def test_encode_with_application_class(self): - ret = encode_implicit(6, self.data, "application") - - self.assertEqual(ret, self.data_application) - - def test_encode_with_context_specific_class(self): - ret = encode_implicit(6, self.data, "context-specific") - - self.assertEqual(ret, self.data_context_specific) - - def test_encode_with_private_class(self): - ret = encode_implicit(6, self.data, "private") - - self.assertEqual(ret, self.data_private) - - def test_encode_with_invalid_class(self): - with self.assertRaises(ValueError) as e: - encode_implicit(6, self.data, "foobar") - - self.assertIn("invalid tag class", str(e.exception)) - - def test_encode_with_too_large_tag(self): - with self.assertRaises(ValueError) as e: - encode_implicit(32, self.data) - - self.assertIn("Long tags not supported", str(e.exception)) - - -class TestRemoveOctetString(unittest.TestCase): - def test_simple(self): - data = b"\x04\x03\xaa\xbb\xcc" - body, rest = remove_octet_string(data) - self.assertEqual(body, b"\xaa\xbb\xcc") - self.assertEqual(rest, b"") - - def test_with_malformed_tag(self): - data = b"\x03\x03\xaa\xbb\xcc" - with self.assertRaises(UnexpectedDER) as e: - remove_octet_string(data) - - self.assertIn("octetstring", str(e.exception)) - - -class TestRemoveSequence(unittest.TestCase): - def test_simple(self): - data = b"\x30\x02\xff\xaa" - body, rest = remove_sequence(data) - self.assertEqual(body, b"\xff\xaa") - self.assertEqual(rest, b"") - - def test_with_empty_string(self): - with self.assertRaises(UnexpectedDER) as e: - remove_sequence(b"") - - self.assertIn("Empty string", str(e.exception)) - - def test_with_wrong_tag(self): - data = b"\x20\x02\xff\xaa" - - with self.assertRaises(UnexpectedDER) as e: - remove_sequence(data) - - self.assertIn("wanted type 'sequence'", str(e.exception)) - - def test_with_wrong_length(self): - data = b"\x30\x03\xff\xaa" - - with self.assertRaises(UnexpectedDER) as e: - remove_sequence(data) - - self.assertIn("Length longer", str(e.exception)) - - -@st.composite -def st_oid(draw, max_value=2**512, max_size=50): - """ - Hypothesis strategy that returns valid OBJECT IDENTIFIERs as tuples - - :param max_value: maximum value of any single sub-identifier - :param max_size: maximum length of the generated OID - """ - first = draw(st.integers(min_value=0, max_value=2)) - if first < 2: - second = draw(st.integers(min_value=0, max_value=39)) - else: - second = draw(st.integers(min_value=0, max_value=max_value)) - rest = draw( - st.lists( - st.integers(min_value=0, max_value=max_value), max_size=max_size - ) - ) - return (first, second) + tuple(rest) - - -HYP_SETTINGS = {} - - -if "--fast" in sys.argv: # pragma: no cover - HYP_SETTINGS["max_examples"] = 2 - - -@settings(**HYP_SETTINGS) -@given(st_oid()) -def test_oids(ids): - encoded_oid = encode_oid(*ids) - decoded_oid, rest = remove_object(encoded_oid) - assert rest == b"" - assert decoded_oid == ids diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py b/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py deleted file mode 100644 index cb225803..00000000 --- 
a/venv/lib/python3.12/site-packages/ecdsa/test_ecdh.py +++ /dev/null @@ -1,449 +0,0 @@ -import os -import sys -import shutil -import subprocess -import pytest -from binascii import unhexlify - -try: - import unittest2 as unittest -except ImportError: - import unittest - -from .curves import ( - NIST192p, - NIST224p, - NIST256p, - NIST384p, - NIST521p, - BRAINPOOLP160r1, - SECP112r2, - SECP128r1, -) -from .curves import curves -from .ecdh import ( - ECDH, - InvalidCurveError, - InvalidSharedSecretError, - NoKeyError, - NoCurveError, -) -from .keys import SigningKey, VerifyingKey -from .ellipticcurve import CurveEdTw - - -if "--fast" in sys.argv: # pragma: no cover - curves = [SECP112r2, SECP128r1] - - -@pytest.mark.parametrize( - "vcurve", - curves, - ids=[curve.name for curve in curves], -) -def test_ecdh_each(vcurve): - if isinstance(vcurve.curve, CurveEdTw): - pytest.skip("ECDH is not supported for Edwards curves") - ecdh1 = ECDH(curve=vcurve) - ecdh2 = ECDH(curve=vcurve) - - ecdh2.generate_private_key() - ecdh1.load_received_public_key(ecdh2.get_public_key()) - ecdh2.load_received_public_key(ecdh1.generate_private_key()) - - secret1 = ecdh1.generate_sharedsecret_bytes() - secret2 = ecdh2.generate_sharedsecret_bytes() - assert secret1 == secret2 - - -def test_ecdh_both_keys_present(): - key1 = SigningKey.generate(BRAINPOOLP160r1) - key2 = SigningKey.generate(BRAINPOOLP160r1) - - ecdh1 = ECDH(BRAINPOOLP160r1, key1, key2.verifying_key) - ecdh2 = ECDH(private_key=key2, public_key=key1.verifying_key) - - secret1 = ecdh1.generate_sharedsecret_bytes() - secret2 = ecdh2.generate_sharedsecret_bytes() - - assert secret1 == secret2 - - -def test_ecdh_no_public_key(): - ecdh1 = ECDH(curve=NIST192p) - - with pytest.raises(NoKeyError): - ecdh1.generate_sharedsecret_bytes() - - ecdh1.generate_private_key() - - with pytest.raises(NoKeyError): - ecdh1.generate_sharedsecret_bytes() - - -class TestECDH(unittest.TestCase): - def test_load_key_from_wrong_curve(self): - ecdh1 = ECDH() - ecdh1.set_curve(NIST192p) - - key1 = SigningKey.generate(BRAINPOOLP160r1) - - with self.assertRaises(InvalidCurveError) as e: - ecdh1.load_private_key(key1) - - self.assertIn("Curve mismatch", str(e.exception)) - - def test_generate_without_curve(self): - ecdh1 = ECDH() - - with self.assertRaises(NoCurveError) as e: - ecdh1.generate_private_key() - - self.assertIn("Curve must be set", str(e.exception)) - - def test_load_bytes_without_curve_set(self): - ecdh1 = ECDH() - - with self.assertRaises(NoCurveError) as e: - ecdh1.load_private_key_bytes(b"\x01" * 32) - - self.assertIn("Curve must be set", str(e.exception)) - - def test_set_curve_from_received_public_key(self): - ecdh1 = ECDH() - - key1 = SigningKey.generate(BRAINPOOLP160r1) - - ecdh1.load_received_public_key(key1.verifying_key) - - self.assertEqual(ecdh1.curve, BRAINPOOLP160r1) - - -def test_ecdh_wrong_public_key_curve(): - ecdh1 = ECDH(curve=NIST192p) - ecdh1.generate_private_key() - ecdh2 = ECDH(curve=NIST256p) - ecdh2.generate_private_key() - - with pytest.raises(InvalidCurveError): - ecdh1.load_received_public_key(ecdh2.get_public_key()) - - with pytest.raises(InvalidCurveError): - ecdh2.load_received_public_key(ecdh1.get_public_key()) - - ecdh1.public_key = ecdh2.get_public_key() - ecdh2.public_key = ecdh1.get_public_key() - - with pytest.raises(InvalidCurveError): - ecdh1.generate_sharedsecret_bytes() - - with pytest.raises(InvalidCurveError): - ecdh2.generate_sharedsecret_bytes() - - -def test_ecdh_invalid_shared_secret_curve(): - ecdh1 = ECDH(curve=NIST256p) - 
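
# For contrast with the failure injected in this test, the baseline
# two-party exchange (a minimal sketch limited to calls already used in
# this module; curve choice is arbitrary):
from ecdsa.ecdh import ECDH
from ecdsa.curves import NIST256p

alice, bob = ECDH(curve=NIST256p), ECDH(curve=NIST256p)
alice.generate_private_key()
bob.generate_private_key()
alice.load_received_public_key(bob.get_public_key())
bob.load_received_public_key(alice.get_public_key())
assert alice.generate_sharedsecret_bytes() == bob.generate_sharedsecret_bytes()
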
ecdh1.generate_private_key() - - ecdh1.load_received_public_key( - SigningKey.generate(NIST256p).get_verifying_key() - ) - - ecdh1.private_key.privkey.secret_multiplier = ecdh1.private_key.curve.order - - with pytest.raises(InvalidSharedSecretError): - ecdh1.generate_sharedsecret_bytes() - - -# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp192r1.txt -# https://github.com/scogliani/ecc-test-vectors/blob/master/ecdh_kat/secp256r1.txt -# https://github.com/coruus/nist-testvectors/blob/master/csrc.nist.gov/groups/STM/cavp/documents/components/ecccdhtestvectors/KAS_ECC_CDH_PrimitiveTest.txt -@pytest.mark.parametrize( - "curve,privatekey,pubkey,secret", - [ - pytest.param( - NIST192p, - "f17d3fea367b74d340851ca4270dcb24c271f445bed9d527", - "42ea6dd9969dd2a61fea1aac7f8e98edcc896c6e55857cc0" - "dfbe5d7c61fac88b11811bde328e8a0d12bf01a9d204b523", - "803d8ab2e5b6e6fca715737c3a82f7ce3c783124f6d51cd0", - id="NIST192p-1", - ), - pytest.param( - NIST192p, - "56e853349d96fe4c442448dacb7cf92bb7a95dcf574a9bd5", - "deb5712fa027ac8d2f22c455ccb73a91e17b6512b5e030e7" - "7e2690a02cc9b28708431a29fb54b87b1f0c14e011ac2125", - "c208847568b98835d7312cef1f97f7aa298283152313c29d", - id="NIST192p-2", - ), - pytest.param( - NIST192p, - "c6ef61fe12e80bf56f2d3f7d0bb757394519906d55500949", - "4edaa8efc5a0f40f843663ec5815e7762dddc008e663c20f" - "0a9f8dc67a3e60ef6d64b522185d03df1fc0adfd42478279", - "87229107047a3b611920d6e3b2c0c89bea4f49412260b8dd", - id="NIST192p-3", - ), - pytest.param( - NIST192p, - "e6747b9c23ba7044f38ff7e62c35e4038920f5a0163d3cda", - "8887c276edeed3e9e866b46d58d895c73fbd80b63e382e88" - "04c5097ba6645e16206cfb70f7052655947dd44a17f1f9d5", - "eec0bed8fc55e1feddc82158fd6dc0d48a4d796aaf47d46c", - id="NIST192p-4", - ), - pytest.param( - NIST192p, - "beabedd0154a1afcfc85d52181c10f5eb47adc51f655047d", - "0d045f30254adc1fcefa8a5b1f31bf4e739dd327cd18d594" - "542c314e41427c08278a08ce8d7305f3b5b849c72d8aff73", - "716e743b1b37a2cd8479f0a3d5a74c10ba2599be18d7e2f4", - id="NIST192p-5", - ), - pytest.param( - NIST192p, - "cf70354226667321d6e2baf40999e2fd74c7a0f793fa8699", - "fb35ca20d2e96665c51b98e8f6eb3d79113508d8bccd4516" - "368eec0d5bfb847721df6aaff0e5d48c444f74bf9cd8a5a7", - "f67053b934459985a315cb017bf0302891798d45d0e19508", - id="NIST192p-6", - ), - pytest.param( - NIST224p, - "8346a60fc6f293ca5a0d2af68ba71d1dd389e5e40837942df3e43cbd", - "af33cd0629bc7e996320a3f40368f74de8704fa37b8fab69abaae280" - "882092ccbba7930f419a8a4f9bb16978bbc3838729992559a6f2e2d7", - "7d96f9a3bd3c05cf5cc37feb8b9d5209d5c2597464dec3e9983743e8", - id="NIST224p", - ), - pytest.param( - NIST256p, - "7d7dc5f71eb29ddaf80d6214632eeae03d9058af1fb6d22ed80badb62bc1a534", - "700c48f77f56584c5cc632ca65640db91b6bacce3a4df6b42ce7cc838833d287" - "db71e509e3fd9b060ddb20ba5c51dcc5948d46fbf640dfe0441782cab85fa4ac", - "46fc62106420ff012e54a434fbdd2d25ccc5852060561e68040dd7778997bd7b", - id="NIST256p-1", - ), - pytest.param( - NIST256p, - "38f65d6dce47676044d58ce5139582d568f64bb16098d179dbab07741dd5caf5", - "809f04289c64348c01515eb03d5ce7ac1a8cb9498f5caa50197e58d43a86a7ae" - "b29d84e811197f25eba8f5194092cb6ff440e26d4421011372461f579271cda3", - "057d636096cb80b67a8c038c890e887d1adfa4195e9b3ce241c8a778c59cda67", - id="NIST256p-2", - ), - pytest.param( - NIST256p, - "1accfaf1b97712b85a6f54b148985a1bdc4c9bec0bd258cad4b3d603f49f32c8", - "a2339c12d4a03c33546de533268b4ad667debf458b464d77443636440ee7fec3" - "ef48a3ab26e20220bcda2c1851076839dae88eae962869a497bf73cb66faf536", - "2d457b78b4614132477618a5b077965ec90730a8c81a1c75d6d4ec68005d67ec", - 
id="NIST256p-3", - ), - pytest.param( - NIST256p, - "207c43a79bfee03db6f4b944f53d2fb76cc49ef1c9c4d34d51b6c65c4db6932d", - "df3989b9fa55495719b3cf46dccd28b5153f7808191dd518eff0c3cff2b705ed" - "422294ff46003429d739a33206c8752552c8ba54a270defc06e221e0feaf6ac4", - "96441259534b80f6aee3d287a6bb17b5094dd4277d9e294f8fe73e48bf2a0024", - id="NIST256p-4", - ), - pytest.param( - NIST256p, - "59137e38152350b195c9718d39673d519838055ad908dd4757152fd8255c09bf", - "41192d2813e79561e6a1d6f53c8bc1a433a199c835e141b05a74a97b0faeb922" - "1af98cc45e98a7e041b01cf35f462b7562281351c8ebf3ffa02e33a0722a1328", - "19d44c8d63e8e8dd12c22a87b8cd4ece27acdde04dbf47f7f27537a6999a8e62", - id="NIST256p-5", - ), - pytest.param( - NIST256p, - "f5f8e0174610a661277979b58ce5c90fee6c9b3bb346a90a7196255e40b132ef", - "33e82092a0f1fb38f5649d5867fba28b503172b7035574bf8e5b7100a3052792" - "f2cf6b601e0a05945e335550bf648d782f46186c772c0f20d3cd0d6b8ca14b2f", - "664e45d5bba4ac931cd65d52017e4be9b19a515f669bea4703542a2c525cd3d3", - id="NIST256p-6", - ), - pytest.param( - NIST384p, - "3cc3122a68f0d95027ad38c067916ba0eb8c38894d22e1b1" - "5618b6818a661774ad463b205da88cf699ab4d43c9cf98a1", - "a7c76b970c3b5fe8b05d2838ae04ab47697b9eaf52e76459" - "2efda27fe7513272734466b400091adbf2d68c58e0c50066" - "ac68f19f2e1cb879aed43a9969b91a0839c4c38a49749b66" - "1efedf243451915ed0905a32b060992b468c64766fc8437a", - "5f9d29dc5e31a163060356213669c8ce132e22f57c9a04f4" - "0ba7fcead493b457e5621e766c40a2e3d4d6a04b25e533f1", - id="NIST384p", - ), - pytest.param( - NIST521p, - "017eecc07ab4b329068fba65e56a1f8890aa935e57134ae0ffcce802735151f4ea" - "c6564f6ee9974c5e6887a1fefee5743ae2241bfeb95d5ce31ddcb6f9edb4d6fc47", - "00685a48e86c79f0f0875f7bc18d25eb5fc8c0b07e5da4f4370f3a949034085433" - "4b1e1b87fa395464c60626124a4e70d0f785601d37c09870ebf176666877a2046d" - "01ba52c56fc8776d9e8f5db4f0cc27636d0b741bbe05400697942e80b739884a83" - "bde99e0f6716939e632bc8986fa18dccd443a348b6c3e522497955a4f3c302f676", - "005fc70477c3e63bc3954bd0df3ea0d1f41ee21746ed95fc5e1fdf90930d5e1366" - "72d72cc770742d1711c3c3a4c334a0ad9759436a4d3c5bf6e74b9578fac148c831", - id="NIST521p", - ), - ], -) -def test_ecdh_NIST(curve, privatekey, pubkey, secret): - ecdh = ECDH(curve=curve) - ecdh.load_private_key_bytes(unhexlify(privatekey)) - ecdh.load_received_public_key_bytes(unhexlify(pubkey)) - - sharedsecret = ecdh.generate_sharedsecret_bytes() - - assert sharedsecret == unhexlify(secret) - - -pem_local_private_key = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" - "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" - "bA==\n" - "-----END EC PRIVATE KEY-----\n" -) -der_local_private_key = ( - "305f02010104185ec8420bd6ef9252a942e989043ca29f561fa525770eb1c5a00a06082a864" - "8ce3d030101a13403320004b88177d084ef17f5e45639408028360f9f59b4a4d7264e62da06" - "51dce47a35a4c5b45cf51593423a8b557b9c2099f36c" -) -pem_remote_public_key = ( - "-----BEGIN PUBLIC KEY-----\n" - "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" - "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" - "-----END PUBLIC KEY-----\n" -) -der_remote_public_key = ( - "3049301306072a8648ce3d020106082a8648ce3d03010103320004b88177d084ef17f5e4563" - "9408028360f9f59b4a4d7264e62da0651dce47a35a4c5b45cf51593423a8b557b9c2099f36c" -) -gshared_secret = "8f457e34982478d1c34b9cd2d0c15911b72dd60d869e2cea" - - -def test_ecdh_pem(): - ecdh = ECDH() - ecdh.load_private_key_pem(pem_local_private_key) - ecdh.load_received_public_key_pem(pem_remote_public_key) - - sharedsecret = 
ecdh.generate_sharedsecret_bytes() - - assert sharedsecret == unhexlify(gshared_secret) - - -def test_ecdh_der(): - ecdh = ECDH() - ecdh.load_private_key_der(unhexlify(der_local_private_key)) - ecdh.load_received_public_key_der(unhexlify(der_remote_public_key)) - - sharedsecret = ecdh.generate_sharedsecret_bytes() - - assert sharedsecret == unhexlify(gshared_secret) - - -# Exception classes used by run_openssl. -class RunOpenSslError(Exception): - pass - - -def run_openssl(cmd): - OPENSSL = "openssl" - p = subprocess.Popen( - [OPENSSL] + cmd.split(), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - stdout, ignored = p.communicate() - if p.returncode != 0: - raise RunOpenSslError( - "cmd '%s %s' failed: rc=%s, stdout/err was %s" - % (OPENSSL, cmd, p.returncode, stdout) - ) - return stdout.decode() - - -OPENSSL_SUPPORTED_CURVES = set( - c.split(":")[0].strip() - for c in run_openssl("ecparam -list_curves").split("\n") -) - - -@pytest.mark.slow -@pytest.mark.parametrize( - "vcurve", - curves, - ids=[curve.name for curve in curves], -) -def test_ecdh_with_openssl(vcurve): - if isinstance(vcurve.curve, CurveEdTw): - pytest.skip("Edwards curves are not supported for ECDH") - - assert vcurve.openssl_name - - if vcurve.openssl_name not in OPENSSL_SUPPORTED_CURVES: - pytest.skip("system openssl does not support " + vcurve.openssl_name) - - try: - hlp = run_openssl("pkeyutl -help") - if hlp.find("-derive") == 0: # pragma: no cover - pytest.skip("system openssl does not support `pkeyutl -derive`") - except RunOpenSslError: # pragma: no cover - pytest.skip("system openssl could not be executed") - - if os.path.isdir("t"): # pragma: no branch - shutil.rmtree("t") - os.mkdir("t") - run_openssl( - "ecparam -name %s -genkey -out t/privkey1.pem" % vcurve.openssl_name - ) - run_openssl( - "ecparam -name %s -genkey -out t/privkey2.pem" % vcurve.openssl_name - ) - run_openssl("ec -in t/privkey1.pem -pubout -out t/pubkey1.pem") - - ecdh1 = ECDH(curve=vcurve) - ecdh2 = ECDH(curve=vcurve) - with open("t/privkey1.pem") as e: - key = e.read() - ecdh1.load_private_key_pem(key) - with open("t/privkey2.pem") as e: - key = e.read() - ecdh2.load_private_key_pem(key) - - with open("t/pubkey1.pem") as e: - key = e.read() - vk1 = VerifyingKey.from_pem(key) - assert vk1.to_string() == ecdh1.get_public_key().to_string() - vk2 = ecdh2.get_public_key() - with open("t/pubkey2.pem", "wb") as e: - e.write(vk2.to_pem()) - - ecdh1.load_received_public_key(vk2) - ecdh2.load_received_public_key(vk1) - secret1 = ecdh1.generate_sharedsecret_bytes() - secret2 = ecdh2.generate_sharedsecret_bytes() - - assert secret1 == secret2 - - run_openssl( - "pkeyutl -derive -inkey t/privkey1.pem -peerkey t/pubkey2.pem -out t/secret1" - ) - run_openssl( - "pkeyutl -derive -inkey t/privkey2.pem -peerkey t/pubkey1.pem -out t/secret2" - ) - - with open("t/secret1", "rb") as e: - ssl_secret1 = e.read() - with open("t/secret1", "rb") as e: - ssl_secret2 = e.read() - - assert len(ssl_secret1) == vk1.curve.verifying_key_length // 2 - assert len(secret1) == vk1.curve.verifying_key_length // 2 - - assert ssl_secret1 == ssl_secret2 - assert secret1 == ssl_secret1 diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py deleted file mode 100644 index c1e25829..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_ecdsa.py +++ /dev/null @@ -1,694 +0,0 @@ -from __future__ import print_function -import sys -import hypothesis.strategies as st -from hypothesis import given, settings, 
note, example - -try: - import unittest2 as unittest -except ImportError: - import unittest -import pytest -from .ecdsa import ( - Private_key, - Public_key, - Signature, - generator_192, - digest_integer, - ellipticcurve, - point_is_valid, - generator_224, - generator_256, - generator_384, - generator_521, - generator_secp256k1, - curve_192, - InvalidPointError, - curve_112r2, - generator_112r2, - int_to_string, -) -from .ellipticcurve import Point - - -HYP_SETTINGS = {} -# old hypothesis doesn't have the "deadline" setting -if sys.version_info > (2, 7): # pragma: no branch - # SEC521p is slow, allow long execution for it - HYP_SETTINGS["deadline"] = 5000 - - -class TestP192FromX9_62(unittest.TestCase): - """Check test vectors from X9.62""" - - @classmethod - def setUpClass(cls): - cls.d = 651056770906015076056810763456358567190100156695615665659 - cls.Q = cls.d * generator_192 - cls.k = 6140507067065001063065065565667405560006161556565665656654 - cls.R = cls.k * generator_192 - - cls.msg = 968236873715988614170569073515315707566766479517 - cls.pubk = Public_key(generator_192, generator_192 * cls.d) - cls.privk = Private_key(cls.pubk, cls.d) - cls.sig = cls.privk.sign(cls.msg, cls.k) - - def test_point_multiplication(self): - assert self.Q.x() == 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 - - def test_point_multiplication_2(self): - assert self.R.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD - assert self.R.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 - - def test_mult_and_addition(self): - u1 = 2563697409189434185194736134579731015366492496392189760599 - u2 = 6266643813348617967186477710235785849136406323338782220568 - temp = u1 * generator_192 + u2 * self.Q - assert temp.x() == 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD - assert temp.y() == 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 - - def test_signature(self): - r, s = self.sig.r, self.sig.s - assert r == 3342403536405981729393488334694600415596881826869351677613 - assert s == 5735822328888155254683894997897571951568553642892029982342 - - def test_verification(self): - assert self.pubk.verifies(self.msg, self.sig) - - def test_rejection(self): - assert not self.pubk.verifies(self.msg - 1, self.sig) - - def test_verification_with_regular_point(self): - pubk = Public_key( - Point( - generator_192.curve(), - generator_192.x(), - generator_192.y(), - generator_192.order(), - ), - self.pubk.point, - ) - - assert pubk.verifies(self.msg, self.sig) - - -class TestPublicKey(unittest.TestCase): - def test_equality_public_keys(self): - gen = generator_192 - x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point = ellipticcurve.Point(gen.curve(), x, y) - pub_key1 = Public_key(gen, point) - pub_key2 = Public_key(gen, point) - self.assertEqual(pub_key1, pub_key2) - - def test_inequality_public_key(self): - gen = generator_192 - x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y1 = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point1 = ellipticcurve.Point(gen.curve(), x1, y1) - - x2 = 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15 - y2 = 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF - point2 = ellipticcurve.Point(gen.curve(), x2, y2) - - pub_key1 = Public_key(gen, point1) - pub_key2 = Public_key(gen, point2) - self.assertNotEqual(pub_key1, pub_key2) - - def test_inequality_different_curves(self): - gen = generator_192 - x1 = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y1 = 
0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point1 = ellipticcurve.Point(gen.curve(), x1, y1) - - x2 = 0x722BA0FB6B8FC8898A4C6AB49E66 - y2 = 0x2B7344BB57A7ABC8CA0F1A398C7D - point2 = ellipticcurve.Point(generator_112r2.curve(), x2, y2) - - pub_key1 = Public_key(gen, point1) - pub_key2 = Public_key(generator_112r2, point2) - self.assertNotEqual(pub_key1, pub_key2) - - def test_inequality_public_key_not_implemented(self): - gen = generator_192 - x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point = ellipticcurve.Point(gen.curve(), x, y) - pub_key = Public_key(gen, point) - self.assertNotEqual(pub_key, None) - - def test_public_key_with_generator_without_order(self): - gen = ellipticcurve.PointJacobi( - generator_192.curve(), generator_192.x(), generator_192.y(), 1 - ) - - x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point = ellipticcurve.Point(gen.curve(), x, y) - - with self.assertRaises(InvalidPointError) as e: - Public_key(gen, point) - - self.assertIn("Generator point must have order", str(e.exception)) - - def test_public_point_on_curve_not_scalar_multiple_of_base_point(self): - x = 2 - y = 0xBE6AA4938EF7CFE6FE29595B6B00 - # we need a curve with cofactor != 1 - point = ellipticcurve.PointJacobi(curve_112r2, x, y, 1) - - self.assertTrue(curve_112r2.contains_point(x, y)) - - with self.assertRaises(InvalidPointError) as e: - Public_key(generator_112r2, point) - - self.assertIn("Generator point order", str(e.exception)) - - def test_point_is_valid_with_not_scalar_multiple_of_base_point(self): - x = 2 - y = 0xBE6AA4938EF7CFE6FE29595B6B00 - - self.assertFalse(point_is_valid(generator_112r2, x, y)) - - # the tests to verify the extensiveness of tests in ecdsa.ecdsa - # if PointJacobi gets modified to calculate the x and y mod p the tests - # below will need to use a fake/mock object - def test_invalid_point_x_negative(self): - pt = ellipticcurve.PointJacobi(curve_192, -1, 0, 1) - - with self.assertRaises(InvalidPointError) as e: - Public_key(generator_192, pt) - - self.assertIn("The public point has x or y", str(e.exception)) - - def test_invalid_point_x_equal_p(self): - pt = ellipticcurve.PointJacobi(curve_192, curve_192.p(), 0, 1) - - with self.assertRaises(InvalidPointError) as e: - Public_key(generator_192, pt) - - self.assertIn("The public point has x or y", str(e.exception)) - - def test_invalid_point_y_negative(self): - pt = ellipticcurve.PointJacobi(curve_192, 0, -1, 1) - - with self.assertRaises(InvalidPointError) as e: - Public_key(generator_192, pt) - - self.assertIn("The public point has x or y", str(e.exception)) - - def test_invalid_point_y_equal_p(self): - pt = ellipticcurve.PointJacobi(curve_192, 0, curve_192.p(), 1) - - with self.assertRaises(InvalidPointError) as e: - Public_key(generator_192, pt) - - self.assertIn("The public point has x or y", str(e.exception)) - - -class TestPublicKeyVerifies(unittest.TestCase): - # test all the different ways that a signature can be publicly invalid - @classmethod - def setUpClass(cls): - gen = generator_192 - x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point = ellipticcurve.Point(gen.curve(), x, y) - - cls.pub_key = Public_key(gen, point) - - def test_sig_with_r_zero(self): - sig = Signature(0, 1) - - self.assertFalse(self.pub_key.verifies(1, sig)) - - def test_sig_with_r_order(self): - sig = Signature(generator_192.order(), 
1) - - self.assertFalse(self.pub_key.verifies(1, sig)) - - def test_sig_with_s_zero(self): - sig = Signature(1, 0) - - self.assertFalse(self.pub_key.verifies(1, sig)) - - def test_sig_with_s_order(self): - sig = Signature(1, generator_192.order()) - - self.assertFalse(self.pub_key.verifies(1, sig)) - - -class TestPrivateKey(unittest.TestCase): - @classmethod - def setUpClass(cls): - gen = generator_192 - x = 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6 - y = 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F - point = ellipticcurve.Point(gen.curve(), x, y) - cls.pub_key = Public_key(gen, point) - - def test_equality_private_keys(self): - pr_key1 = Private_key(self.pub_key, 100) - pr_key2 = Private_key(self.pub_key, 100) - self.assertEqual(pr_key1, pr_key2) - - def test_inequality_private_keys(self): - pr_key1 = Private_key(self.pub_key, 100) - pr_key2 = Private_key(self.pub_key, 200) - self.assertNotEqual(pr_key1, pr_key2) - - def test_inequality_private_keys_not_implemented(self): - pr_key = Private_key(self.pub_key, 100) - self.assertNotEqual(pr_key, None) - - -# Testing point validity, as per ECDSAVS.pdf B.2.2: -P192_POINTS = [ - ( - generator_192, - 0xCD6D0F029A023E9AACA429615B8F577ABEE685D8257CC83A, - 0x00019C410987680E9FB6C0B6ECC01D9A2647C8BAE27721BACDFC, - False, - ), - ( - generator_192, - 0x00017F2FCE203639E9EAF9FB50B81FC32776B30E3B02AF16C73B, - 0x95DA95C5E72DD48E229D4748D4EEE658A9A54111B23B2ADB, - False, - ), - ( - generator_192, - 0x4F77F8BC7FCCBADD5760F4938746D5F253EE2168C1CF2792, - 0x000147156FF824D131629739817EDB197717C41AAB5C2A70F0F6, - False, - ), - ( - generator_192, - 0xC58D61F88D905293BCD4CD0080BCB1B7F811F2FFA41979F6, - 0x8804DC7A7C4C7F8B5D437F5156F3312CA7D6DE8A0E11867F, - True, - ), - ( - generator_192, - 0xCDF56C1AA3D8AFC53C521ADF3FFB96734A6A630A4A5B5A70, - 0x97C1C44A5FB229007B5EC5D25F7413D170068FFD023CAA4E, - True, - ), - ( - generator_192, - 0x89009C0DC361C81E99280C8E91DF578DF88CDF4B0CDEDCED, - 0x27BE44A529B7513E727251F128B34262A0FD4D8EC82377B9, - True, - ), - ( - generator_192, - 0x6A223D00BD22C52833409A163E057E5B5DA1DEF2A197DD15, - 0x7B482604199367F1F303F9EF627F922F97023E90EAE08ABF, - True, - ), - ( - generator_192, - 0x6DCCBDE75C0948C98DAB32EA0BC59FE125CF0FB1A3798EDA, - 0x0001171A3E0FA60CF3096F4E116B556198DE430E1FBD330C8835, - False, - ), - ( - generator_192, - 0xD266B39E1F491FC4ACBBBC7D098430931CFA66D55015AF12, - 0x193782EB909E391A3148B7764E6B234AA94E48D30A16DBB2, - False, - ), - ( - generator_192, - 0x9D6DDBCD439BAA0C6B80A654091680E462A7D1D3F1FFEB43, - 0x6AD8EFC4D133CCF167C44EB4691C80ABFFB9F82B932B8CAA, - False, - ), - ( - generator_192, - 0x146479D944E6BDA87E5B35818AA666A4C998A71F4E95EDBC, - 0xA86D6FE62BC8FBD88139693F842635F687F132255858E7F6, - False, - ), - ( - generator_192, - 0xE594D4A598046F3598243F50FD2C7BD7D380EDB055802253, - 0x509014C0C4D6B536E3CA750EC09066AF39B4C8616A53A923, - False, - ), -] - - -@pytest.mark.parametrize("generator,x,y,expected", P192_POINTS) -def test_point_validity(generator, x, y, expected): - """ - `generator` defines the curve; is `(x, y)` a point on - this curve? `expected` is True if the right answer is Yes. 
- """ - assert point_is_valid(generator, x, y) == expected - - -# Trying signature-verification tests from ECDSAVS.pdf B.2.4: -CURVE_192_KATS = [ - ( - generator_192, - int( - "0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee" - "425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30" - "d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff79" - "8cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d1" - "58", - 16, - ), - 0xD9DBFB332AA8E5FF091E8CE535857C37C73F6250FFB2E7AC, - 0x282102E364FEDED3AD15DDF968F88D8321AA268DD483EBC4, - 0x64DCA58A20787C488D11D6DD96313F1B766F2D8EFE122916, - 0x1ECBA28141E84AB4ECAD92F56720E2CC83EB3D22DEC72479, - True, - ), - ( - generator_192, - int( - "0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db1" - "2e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a" - "91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db3" - "26ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63" - "f4", - 16, - ), - 0x3E53EF8D3112AF3285C0E74842090712CD324832D4277AE7, - 0xCC75F8952D30AEC2CBB719FC6AA9934590B5D0FF5A83ADB7, - 0x8285261607283BA18F335026130BAB31840DCFD9C3E555AF, - 0x356D89E1B04541AFC9704A45E9C535CE4A50929E33D7E06C, - True, - ), - ( - generator_192, - int( - "0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911" - "b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cd" - "d41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d30" - "3f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42" - "dd", - 16, - ), - 0x16335DBE95F8E8254A4E04575D736BEFB258B8657F773CB7, - 0x421B13379C59BC9DCE38A1099CA79BBD06D647C7F6242336, - 0x4141BD5D64EA36C5B0BD21EF28C02DA216ED9D04522B1E91, - 0x159A6AA852BCC579E821B7BB0994C0861FB08280C38DAA09, - False, - ), - ( - generator_192, - int( - "0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b56309" - "7ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8" - "bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447" - "bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd" - "8a", - 16, - ), - 0xFD14CDF1607F5EFB7B1793037B15BDF4BAA6F7C16341AB0B, - 0x83FA0795CC6C4795B9016DAC928FD6BAC32F3229A96312C4, - 0x8DFDB832951E0167C5D762A473C0416C5C15BC1195667DC1, - 0x1720288A2DC13FA1EC78F763F8FE2FF7354A7E6FDDE44520, - False, - ), - ( - generator_192, - int( - "0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d3919" - "2e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196" - "683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bc" - "eae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072" - "fb", - 16, - ), - 0x674F941DC1A1F8B763C9334D726172D527B90CA324DB8828, - 0x65ADFA32E8B236CB33A3E84CF59BFB9417AE7E8EDE57A7FF, - 0x9508B9FDD7DAF0D8126F9E2BC5A35E4C6D800B5B804D7796, - 0x36F2BF6B21B987C77B53BB801B3435A577E3D493744BFAB0, - False, - ), - ( - generator_192, - int( - "0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397c" - "e15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aa" - "e98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc" - "55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca" - "6d", - 16, - ), - 0x10ECCA1AAD7220B56A62008B35170BFD5E35885C4014A19F, - 0x04EB61984C6C12ADE3BC47F3C629ECE7AA0A033B9948D686, - 0x82BFA4E82C0DFE9274169B86694E76CE993FD83B5C60F325, - 0xA97685676C59A65DBDE002FE9D613431FB183E8006D05633, - False, - ), - ( - generator_192, - int( - 
"0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f" - "698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98" - "f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a2" - "78461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76" - "e1", - 16, - ), - 0x6636653CB5B894CA65C448277B29DA3AD101C4C2300F7C04, - 0xFDF1CBB3FC3FD6A4F890B59E554544175FA77DBDBEB656C1, - 0xEAC2DDECDDFB79931A9C3D49C08DE0645C783A24CB365E1C, - 0x3549FEE3CFA7E5F93BC47D92D8BA100E881A2A93C22F8D50, - False, - ), - ( - generator_192, - int( - "0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6" - "c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7" - "a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b" - "9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6b" - "a2", - 16, - ), - 0xA82BD718D01D354001148CD5F69B9EBF38FF6F21898F8AAA, - 0xE67CEEDE07FC2EBFAFD62462A51E4B6C6B3D5B537B7CAF3E, - 0x4D292486C620C3DE20856E57D3BB72FCDE4A73AD26376955, - 0xA85289591A6081D5728825520E62FF1C64F94235C04C7F95, - False, - ), - ( - generator_192, - int( - "0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a" - "961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc91" - "0250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53" - "808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb6" - "58", - 16, - ), - 0x7D3B016B57758B160C4FCA73D48DF07AE3B6B30225126C2F, - 0x4AF3790D9775742BDE46F8DA876711BE1B65244B2B39E7EC, - 0x95F778F5F656511A5AB49A5D69DDD0929563C29CBC3A9E62, - 0x75C87FC358C251B4C83D2DD979FAAD496B539F9F2EE7A289, - False, - ), - ( - generator_192, - int( - "0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e102" - "88acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c9" - "0a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9e" - "a387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c9" - "7a", - 16, - ), - 0x9362F28C4EF96453D8A2F849F21E881CD7566887DA8BEB4A, - 0xE64D26D8D74C48A024AE85D982EE74CD16046F4EE5333905, - 0xF3923476A296C88287E8DE914B0B324AD5A963319A4FE73B, - 0xF0BAEED7624ED00D15244D8BA2AEDE085517DBDEC8AC65F5, - True, - ), - ( - generator_192, - int( - "0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f645" - "0d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d90" - "64e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8c" - "e1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd045" - "6d", - 16, - ), - 0xCC6FC032A846AAAC25533EB033522824F94E670FA997ECEF, - 0xE25463EF77A029ECCDA8B294FD63DD694E38D223D30862F1, - 0x066B1D07F3A40E679B620EDA7F550842A35C18B80C5EBE06, - 0xA0B0FB201E8F2DF65E2C4508EF303BDC90D934016F16B2DC, - False, - ), - ( - generator_192, - int( - "0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae" - "5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214e" - "ed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c4" - "40341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839" - "d7", - 16, - ), - 0x955C908FE900A996F7E2089BEE2F6376830F76A19135E753, - 0xBA0C42A91D3847DE4A592A46DC3FDAF45A7CC709B90DE520, - 0x1F58AD77FC04C782815A1405B0925E72095D906CBF52A668, - 0xF2E93758B3AF75EDF784F05A6761C9B9A6043C66B845B599, - False, - ), - ( - generator_192, - int( - "0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf99866" - "70a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b412" - "69bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52" - 
"e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160ce" - "f3", - 16, - ), - 0x31F7FA05576D78A949B24812D4383107A9A45BB5FCCDD835, - 0x8DC0EB65994A90F02B5E19BD18B32D61150746C09107E76B, - 0xBE26D59E4E883DDE7C286614A767B31E49AD88789D3A78FF, - 0x8762CA831C1CE42DF77893C9B03119428E7A9B819B619068, - False, - ), - ( - generator_192, - int( - "0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f" - "387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add502357" - "2720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670" - "716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1", - 16, - ), - 0x66AA8EDBBDB5CF8E28CEB51B5BDA891CAE2DF84819FE25C0, - 0x0C6BC2F69030A7CE58D4A00E3B3349844784A13B8936F8DA, - 0xA4661E69B1734F4A71B788410A464B71E7FFE42334484F23, - 0x738421CF5E049159D69C57A915143E226CAC8355E149AFE9, - False, - ), - ( - generator_192, - int( - "0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5af" - "a261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461" - "184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6d" - "b377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb", - 16, - ), - 0xBCFACF45139B6F5F690A4C35A5FFFA498794136A2353FC77, - 0x6F4A6C906316A6AFC6D98FE1F0399D056F128FE0270B0F22, - 0x9DB679A3DAFE48F7CCAD122933ACFE9DA0970B71C94C21C1, - 0x984C2DB99827576C0A41A5DA41E07D8CC768BC82F18C9DA9, - False, - ), -] - - -@pytest.mark.parametrize("gen,msg,qx,qy,r,s,expected", CURVE_192_KATS) -def test_signature_validity(gen, msg, qx, qy, r, s, expected): - """ - `msg` = message, `qx` and `qy` represent the base point on - elliptic curve of `gen`, `r` and `s` are the signature, and - `expected` is True iff the signature is expected to be valid.""" - pubk = Public_key(gen, ellipticcurve.Point(gen.curve(), qx, qy)) - with pytest.warns(DeprecationWarning) as warns: - msg_dgst = digest_integer(msg) - assert len(warns) == 3 - assert "unused" in warns[0].message.args[0] - assert "unused" in warns[1].message.args[0] - assert "unused" in warns[2].message.args[0] - assert expected == pubk.verifies(msg_dgst, Signature(r, s)) - - -@pytest.mark.parametrize( - "gen,msg,qx,qy,r,s,expected", [x for x in CURVE_192_KATS if x[6]] -) -def test_pk_recovery(gen, msg, r, s, qx, qy, expected): - del expected - sign = Signature(r, s) - with pytest.warns(DeprecationWarning) as warns: - msg_dgst = digest_integer(msg) - assert len(warns) == 3 - assert "unused" in warns[0].message.args[0] - assert "unused" in warns[1].message.args[0] - assert "unused" in warns[2].message.args[0] - pks = sign.recover_public_keys(msg_dgst, gen) - - assert pks - - # Test if the signature is valid for all found public keys - for pk in pks: - q = pk.point - test_signature_validity(gen, msg, q.x(), q.y(), r, s, True) - - # Test if the original public key is in the set of found keys - original_q = ellipticcurve.Point(gen.curve(), qx, qy) - points = [pk.point for pk in pks] - assert original_q in points - - -@st.composite -def st_random_gen_key_msg_nonce(draw): - """Hypothesis strategy for test_sig_verify().""" - name_gen = { - "generator_192": generator_192, - "generator_224": generator_224, - "generator_256": generator_256, - "generator_secp256k1": generator_secp256k1, - "generator_384": generator_384, - "generator_521": generator_521, - } - name = draw(st.sampled_from(sorted(name_gen.keys()))) - note("Generator used: {0}".format(name)) - generator = name_gen[name] - order = int(generator.order()) - 1 - - key = draw(st.integers(min_value=1, max_value=order)) - msg = 
draw(st.integers(min_value=1, max_value=order)) - nonce = draw( - st.integers(min_value=1, max_value=order) - | st.integers(min_value=order >> 1, max_value=order) - ) - return generator, key, msg, nonce - - -SIG_VER_SETTINGS = dict(HYP_SETTINGS) -if "--fast" in sys.argv: # pragma: no cover - SIG_VER_SETTINGS["max_examples"] = 1 -else: - SIG_VER_SETTINGS["max_examples"] = 10 - - -@settings(**SIG_VER_SETTINGS) -@example((generator_224, 4, 1, 1)) -@given(st_random_gen_key_msg_nonce()) -def test_sig_verify(args): - """ - Check if signing and verification works for arbitrary messages and - that signatures for other messages are rejected. - """ - generator, sec_mult, msg, nonce = args - - pubkey = Public_key(generator, generator * sec_mult) - privkey = Private_key(pubkey, sec_mult) - - signature = privkey.sign(msg, nonce) - - assert pubkey.verifies(msg, signature) - - assert not pubkey.verifies(msg - 1, signature) - - -def test_int_to_string_with_zero(): - with pytest.warns(DeprecationWarning) as warns: - assert int_to_string(0) == b"\x00" - - assert len(warns) == 1 - assert "unused" in warns[0].message.args[0] diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py deleted file mode 100644 index 6821b3bc..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_eddsa.py +++ /dev/null @@ -1,1124 +0,0 @@ -import sys -import pickle -import hashlib -import pytest - -try: - import unittest2 as unittest -except ImportError: - import unittest -from hypothesis import given, settings, example -import hypothesis.strategies as st -from .ellipticcurve import PointEdwards, INFINITY, CurveEdTw -from .eddsa import ( - generator_ed25519, - curve_ed25519, - generator_ed448, - curve_ed448, - PrivateKey, - PublicKey, -) -from .ecdsa import generator_256, curve_256 -from .errors import MalformedPointError -from ._compat import a2b_hex, compat26_str - - -class TestA2B_Hex(unittest.TestCase): - def test_invalid_input(self): - with self.assertRaises(ValueError): - a2b_hex("abcdefghi") - - -def test_ed25519_curve_compare(): - assert curve_ed25519 != curve_256 - - -def test_ed25519_and_ed448_compare(): - assert curve_ed448 != curve_ed25519 - - -def test_ed25519_and_custom_curve_compare(): - a = CurveEdTw(curve_ed25519.p(), -curve_ed25519.a(), 1) - - assert curve_ed25519 != a - - -def test_ed25519_and_almost_exact_curve_compare(): - a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), 1) - - assert curve_ed25519 != a - - -def test_ed25519_and_same_curve_params(): - a = CurveEdTw(curve_ed25519.p(), curve_ed25519.a(), curve_ed25519.d()) - - assert curve_ed25519 == a - assert not (curve_ed25519 != a) - - -def test_ed25519_contains_point(): - g = generator_ed25519 - assert curve_ed25519.contains_point(g.x(), g.y()) - - -def test_ed25519_contains_point_bad(): - assert not curve_ed25519.contains_point(1, 1) - - -def test_ed25519_double(): - a = generator_ed25519 - - z = a.double() - - assert isinstance(z, PointEdwards) - - x2 = int( - "24727413235106541002554574571675588834622768167397638456726423" - "682521233608206" - ) - y2 = int( - "15549675580280190176352668710449542251549572066445060580507079" - "593062643049417" - ) - - b = PointEdwards(curve_ed25519, x2, y2, 1, x2 * y2) - - assert z == b - assert a != b - - -def test_ed25519_add_as_double(): - a = generator_ed25519 - - z = a + a - - assert isinstance(z, PointEdwards) - - b = generator_ed25519.double() - - assert z == b - - -def test_ed25519_double_infinity(): - a = PointEdwards(curve_ed25519, 0, 1, 
1, 0) - - z = a.double() - - assert z is INFINITY - - -def test_ed25519_double_badly_encoded_infinity(): - # invalid point, mostly to make instrumental happy - a = PointEdwards(curve_ed25519, 1, 1, 1, 0) - - z = a.double() - - assert z is INFINITY - - -def test_ed25519_eq_with_different_z(): - x = generator_ed25519.x() - y = generator_ed25519.y() - p = curve_ed25519.p() - - a = PointEdwards(curve_ed25519, x * 2 % p, y * 2 % p, 2, x * y * 2 % p) - b = PointEdwards(curve_ed25519, x * 3 % p, y * 3 % p, 3, x * y * 3 % p) - - assert a == b - - assert not (a != b) - - -def test_ed25519_eq_against_infinity(): - assert generator_ed25519 != INFINITY - - -def test_ed25519_eq_encoded_infinity_against_infinity(): - a = PointEdwards(curve_ed25519, 0, 1, 1, 0) - assert a == INFINITY - - -def test_ed25519_eq_bad_encode_of_infinity_against_infinity(): - # technically incorrect encoding of the point at infinity, but we check - # both X and T, so verify that just T==0 works - a = PointEdwards(curve_ed25519, 1, 1, 1, 0) - assert a == INFINITY - - -def test_ed25519_eq_against_non_Edwards_point(): - assert generator_ed25519 != generator_256 - - -def test_ed25519_eq_against_negated_point(): - g = generator_ed25519 - neg = PointEdwards(curve_ed25519, -g.x(), g.y(), 1, -g.x() * g.y()) - assert g != neg - - -def test_ed25519_eq_x_different_y(): - # not points on the curve, but __eq__ doesn't care - a = PointEdwards(curve_ed25519, 1, 1, 1, 1) - b = PointEdwards(curve_ed25519, 1, 2, 1, 2) - - assert a != b - - -def test_ed25519_mul_by_order(): - g = PointEdwards( - curve_ed25519, - generator_ed25519.x(), - generator_ed25519.y(), - 1, - generator_ed25519.x() * generator_ed25519.y(), - ) - - assert g * generator_ed25519.order() == INFINITY - - -def test_radd(): - - a = PointEdwards(curve_ed25519, 1, 1, 1, 1) - - p = INFINITY + a - - assert p == a - - -def test_ed25519_test_normalisation_and_scaling(): - x = generator_ed25519.x() - y = generator_ed25519.y() - p = curve_ed25519.p() - - a = PointEdwards(curve_ed25519, x * 11 % p, y * 11 % p, 11, x * y * 11 % p) - - assert a.x() == x - assert a.y() == y - - a.scale() - - assert a.x() == x - assert a.y() == y - - a.scale() # second execution should be a noop - - assert a.x() == x - assert a.y() == y - - -def test_ed25519_add_three_times(): - a = generator_ed25519 - - z = a + a + a - - x3 = int( - "468967334644549386571235445953867877890461982801326656862413" - "21779790909858396" - ) - y3 = int( - "832484377853344397649037712036920113830141722629755531674120" - "2210403726505172" - ) - - b = PointEdwards(curve_ed25519, x3, y3, 1, x3 * y3) - - assert z == b - - -def test_ed25519_add_to_infinity(): - # generator * (order-1) - x1 = int( - "427838232691226969392843410947554224151809796397784248136826" - "78720006717057747" - ) - y1 = int( - "463168356949264781694283940034751631413079938662562256157830" - "33603165251855960" - ) - inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) - - inf = inf_m_1 + generator_ed25519 - - assert inf is INFINITY - - -def test_ed25519_add_and_mul_equivalence(): - g = generator_ed25519 - - assert g + g == g * 2 - assert g + g + g == g * 3 - - -def test_ed25519_add_literal_infinity(): - g = generator_ed25519 - z = g + INFINITY - - assert z == g - - -def test_ed25519_add_infinity(): - inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) - g = generator_ed25519 - z = g + inf - - assert z == g - - z = inf + g - - assert z == g - - -class TestEd25519(unittest.TestCase): - def test_add_wrong_curves(self): - with self.assertRaises(ValueError) as e: - 
generator_ed25519 + generator_ed448 - - self.assertIn("different curve", str(e.exception)) - - def test_add_wrong_point_type(self): - with self.assertRaises(ValueError) as e: - generator_ed25519 + generator_256 - - self.assertIn("different curve", str(e.exception)) - - -def test_generate_with_point(): - x1 = int( - "427838232691226969392843410947554224151809796397784248136826" - "78720006717057747" - ) - y1 = int( - "463168356949264781694283940034751631413079938662562256157830" - "33603165251855960" - ) - p = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) - - pk = PublicKey(generator_ed25519, b"0" * 32, public_point=p) - - assert pk.public_point() == p - - -def test_ed25519_mul_to_order_min_1(): - x1 = int( - "427838232691226969392843410947554224151809796397784248136826" - "78720006717057747" - ) - y1 = int( - "463168356949264781694283940034751631413079938662562256157830" - "33603165251855960" - ) - inf_m_1 = PointEdwards(curve_ed25519, x1, y1, 1, x1 * y1) - - assert generator_ed25519 * (generator_ed25519.order() - 1) == inf_m_1 - - -def test_ed25519_mul_to_infinity(): - assert generator_ed25519 * generator_ed25519.order() == INFINITY - - -def test_ed25519_mul_to_infinity_plus_1(): - g = generator_ed25519 - assert g * (g.order() + 1) == g - - -def test_ed25519_mul_and_add(): - g = generator_ed25519 - a = g * 128 - b = g * 64 + g * 64 - - assert a == b - - -def test_ed25519_mul_and_add_2(): - g = generator_ed25519 - - a = g * 123 - b = g * 120 + g * 3 - - assert a == b - - -def test_ed25519_mul_infinity(): - inf = PointEdwards(curve_ed25519, 0, 1, 1, 0) - - z = inf * 11 - - assert z == INFINITY - - -def test_ed25519_mul_by_zero(): - z = generator_ed25519 * 0 - - assert z == INFINITY - - -def test_ed25519_mul_by_one(): - z = generator_ed25519 * 1 - - assert z == generator_ed25519 - - -def test_ed25519_mul_custom_point(): - # verify that multiplication without order set works - - g = generator_ed25519 - - a = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) - - z = a * 11 - - assert z == g * 11 - - -def test_ed25519_pickle(): - g = generator_ed25519 - assert pickle.loads(pickle.dumps(g)) == g - - -def test_ed448_eq_against_different_curve(): - assert generator_ed25519 != generator_ed448 - - -def test_ed448_double(): - g = generator_ed448 - z = g.double() - - assert isinstance(z, PointEdwards) - - x2 = int( - "4845591495304045936995492052586696895690942404582120401876" - "6013278705691214670908136440114445572635086627683154494739" - "7859048262938744149" - ) - y2 = int( - "4940887598674337276743026725267350893505445523037277237461" - "2648447308771911703729389009346215770388834286503647778745" - "3078312060500281069" - ) - - b = PointEdwards(curve_ed448, x2, y2, 1, x2 * y2) - - assert z == b - assert g != b - - -def test_ed448_add_as_double(): - g = generator_ed448 - z = g + g - - b = g.double() - - assert z == b - - -def test_ed448_mul_as_double(): - g = generator_ed448 - z = g * 2 - b = g.double() - - assert z == b - - -def test_ed448_add_to_infinity(): - # generator * (order - 1) - x1 = int( - "5022586839996825903617194737881084981068517190547539260353" - "6473749366191269932473977736719082931859264751085238669719" - "1187378895383117729" - ) - y1 = int( - "2988192100784814926760179304439306734375440401540802420959" - "2824137233150618983587600353687865541878473398230323350346" - "2500531545062832660" - ) - inf_m_1 = PointEdwards(curve_ed448, x1, y1, 1, x1 * y1) - - inf = inf_m_1 + generator_ed448 - - assert inf is INFINITY - - -def test_ed448_mul_to_infinity(): - g = 
generator_ed448 - inf = g * g.order() - - assert inf is INFINITY - - -def test_ed448_mul_to_infinity_plus_1(): - g = generator_ed448 - - z = g * (g.order() + 1) - - assert z == g - - -def test_ed448_add_and_mul_equivalence(): - g = generator_ed448 - - assert g + g == g * 2 - assert g + g + g == g * 3 - - -def test_ed25519_encode(): - g = generator_ed25519 - g_bytes = g.to_bytes() - assert len(g_bytes) == 32 - exp_bytes = ( - b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - ) - assert g_bytes == exp_bytes - - -def test_ed25519_decode(): - exp_bytes = ( - b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - ) - a = PointEdwards.from_bytes(curve_ed25519, exp_bytes) - - assert a == generator_ed25519 - - -class TestEdwardsMalformed(unittest.TestCase): - def test_invalid_point(self): - exp_bytes = ( - b"\x78\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - ) - with self.assertRaises(MalformedPointError): - PointEdwards.from_bytes(curve_ed25519, exp_bytes) - - def test_invalid_length(self): - exp_bytes = ( - b"\x58\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - b"\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66\x66" - b"\x66" - ) - with self.assertRaises(MalformedPointError) as e: - PointEdwards.from_bytes(curve_ed25519, exp_bytes) - - self.assertIn("length", str(e.exception)) - - def test_ed448_invalid(self): - exp_bytes = b"\xff" * 57 - with self.assertRaises(MalformedPointError): - PointEdwards.from_bytes(curve_ed448, exp_bytes) - - -def test_ed448_encode(): - g = generator_ed448 - g_bytes = g.to_bytes() - assert len(g_bytes) == 57 - exp_bytes = ( - b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" - b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" - b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" - b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" - ) - assert g_bytes == exp_bytes - - -def test_ed448_decode(): - exp_bytes = ( - b"\x14\xfa\x30\xf2\x5b\x79\x08\x98\xad\xc8\xd7\x4e\x2c\x13\xbd" - b"\xfd\xc4\x39\x7c\xe6\x1c\xff\xd3\x3a\xd7\xc2\xa0\x05\x1e\x9c" - b"\x78\x87\x40\x98\xa3\x6c\x73\x73\xea\x4b\x62\xc7\xc9\x56\x37" - b"\x20\x76\x88\x24\xbc\xb6\x6e\x71\x46\x3f\x69\x00" - ) - - a = PointEdwards.from_bytes(curve_ed448, exp_bytes) - - assert a == generator_ed448 - - -class TestEdDSAEquality(unittest.TestCase): - def test_equal_public_points(self): - key1 = PublicKey(generator_ed25519, b"\x01" * 32) - key2 = PublicKey(generator_ed25519, b"\x01" * 32) - - self.assertEqual(key1, key2) - # verify that `__ne__` works as expected - self.assertFalse(key1 != key2) - - def test_unequal_public_points(self): - key1 = PublicKey(generator_ed25519, b"\x01" * 32) - key2 = PublicKey(generator_ed25519, b"\x03" * 32) - - self.assertNotEqual(key1, key2) - - def test_unequal_to_string(self): - key1 = PublicKey(generator_ed25519, b"\x01" * 32) - key2 = b"\x01" * 32 - - self.assertNotEqual(key1, key2) - - def test_unequal_publickey_curves(self): - key1 = PublicKey(generator_ed25519, b"\x01" * 32) - key2 = PublicKey(generator_ed448, b"\x03" * 56 + b"\x00") - - self.assertNotEqual(key1, key2) - # verify that `__ne__` works as expected - self.assertTrue(key1 != key2) - - def test_equal_private_keys(self): - key1 = PrivateKey(generator_ed25519, b"\x01" * 32) - key2 = 
PrivateKey(generator_ed25519, b"\x01" * 32) - - self.assertEqual(key1, key2) - # verify that `__ne__` works as expected - self.assertFalse(key1 != key2) - - def test_unequal_private_keys(self): - key1 = PrivateKey(generator_ed25519, b"\x01" * 32) - key2 = PrivateKey(generator_ed25519, b"\x02" * 32) - - self.assertNotEqual(key1, key2) - # verify that `__ne__` works as expected - self.assertTrue(key1 != key2) - - def test_unequal_privatekey_to_string(self): - key1 = PrivateKey(generator_ed25519, b"\x01" * 32) - key2 = b"\x01" * 32 - - self.assertNotEqual(key1, key2) - - def test_unequal_privatekey_curves(self): - key1 = PrivateKey(generator_ed25519, b"\x01" * 32) - key2 = PrivateKey(generator_ed448, b"\x01" * 57) - - self.assertNotEqual(key1, key2) - - -class TestInvalidEdDSAInputs(unittest.TestCase): - def test_wrong_length_of_private_key(self): - with self.assertRaises(ValueError): - PrivateKey(generator_ed25519, b"\x01" * 31) - - def test_wrong_length_of_public_key(self): - with self.assertRaises(ValueError): - PublicKey(generator_ed25519, b"\x01" * 33) - - def test_wrong_cofactor_curve(self): - ed_c = curve_ed25519 - - def _hash(data): - return hashlib.new("sha512", compat26_str(data)).digest() - - curve = CurveEdTw(ed_c.p(), ed_c.a(), ed_c.d(), 1, _hash) - g = generator_ed25519 - fake_gen = PointEdwards(curve, g.x(), g.y(), 1, g.x() * g.y()) - - with self.assertRaises(ValueError) as e: - PrivateKey(fake_gen, g.to_bytes()) - - self.assertIn("cofactor", str(e.exception)) - - def test_invalid_signature_length(self): - key = PublicKey(generator_ed25519, b"\x01" * 32) - - with self.assertRaises(ValueError) as e: - key.verify(b"", b"\x01" * 65) - - self.assertIn("length", str(e.exception)) - - def test_changing_public_key(self): - key = PublicKey(generator_ed25519, b"\x01" * 32) - - g = key.point - - new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) - - key.point = new_g - - self.assertEqual(g, key.point) - - def test_changing_public_key_to_different_point(self): - key = PublicKey(generator_ed25519, b"\x01" * 32) - - with self.assertRaises(ValueError) as e: - key.point = generator_ed25519 - - self.assertIn("coordinates", str(e.exception)) - - def test_invalid_s_value(self): - key = PublicKey( - generator_ed25519, - b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" - b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", - ) - sig_valid = bytearray( - b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" - b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" - b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" - b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" - ) - - self.assertTrue(key.verify(b"", sig_valid)) - - sig_invalid = bytearray(sig_valid) - sig_invalid[-1] = 0xFF - - with self.assertRaises(ValueError): - key.verify(b"", sig_invalid) - - def test_invalid_r_value(self): - key = PublicKey( - generator_ed25519, - b"\xd7\x5a\x98\x01\x82\xb1\x0a\xb7\xd5\x4b\xfe\xd3\xc9\x64\x07\x3a" - b"\x0e\xe1\x72\xf3\xda\xa6\x23\x25\xaf\x02\x1a\x68\xf7\x07\x51\x1a", - ) - sig_valid = bytearray( - b"\xe5\x56\x43\x00\xc3\x60\xac\x72\x90\x86\xe2\xcc\x80\x6e\x82\x8a" - b"\x84\x87\x7f\x1e\xb8\xe5\xd9\x74\xd8\x73\xe0\x65\x22\x49\x01\x55" - b"\x5f\xb8\x82\x15\x90\xa3\x3b\xac\xc6\x1e\x39\x70\x1c\xf9\xb4\x6b" - b"\xd2\x5b\xf5\xf0\x59\x5b\xbe\x24\x65\x51\x41\x43\x8e\x7a\x10\x0b" - ) - - self.assertTrue(key.verify(b"", sig_valid)) - - sig_invalid = bytearray(sig_valid) - sig_invalid[0] = 0xE0 - - with 
self.assertRaises(ValueError): - key.verify(b"", sig_invalid) - - -HYP_SETTINGS = dict() -if "--fast" in sys.argv: # pragma: no cover - HYP_SETTINGS["max_examples"] = 2 -else: - HYP_SETTINGS["max_examples"] = 10 - - -@settings(**HYP_SETTINGS) -@example(1) -@example(5) # smallest multiple that requires changing sign of x -@given(st.integers(min_value=1, max_value=int(generator_ed25519.order() - 1))) -def test_ed25519_encode_decode(multiple): - a = generator_ed25519 * multiple - - b = PointEdwards.from_bytes(curve_ed25519, a.to_bytes()) - - assert a == b - - -@settings(**HYP_SETTINGS) -@example(1) -@example(2) # smallest multiple that requires changing the sign of x -@given(st.integers(min_value=1, max_value=int(generator_ed448.order() - 1))) -def test_ed448_encode_decode(multiple): - a = generator_ed448 * multiple - - b = PointEdwards.from_bytes(curve_ed448, a.to_bytes()) - - assert a == b - - -@settings(**HYP_SETTINGS) -@example(1) -@example(2) -@given(st.integers(min_value=1, max_value=int(generator_ed25519.order()) - 1)) -def test_ed25519_mul_precompute_vs_naf(multiple): - """Compare multiplication with and without precomputation.""" - g = generator_ed25519 - new_g = PointEdwards(curve_ed25519, g.x(), g.y(), 1, g.x() * g.y()) - - assert g * multiple == multiple * new_g - - -# Test vectors from RFC 8032 -TEST_VECTORS = [ - # TEST 1 - ( - generator_ed25519, - "9d61b19deffd5a60ba844af492ec2cc4" "4449c5697b326919703bac031cae7f60", - "d75a980182b10ab7d54bfed3c964073a" "0ee172f3daa62325af021a68f707511a", - "", - "e5564300c360ac729086e2cc806e828a" - "84877f1eb8e5d974d873e06522490155" - "5fb8821590a33bacc61e39701cf9b46b" - "d25bf5f0595bbe24655141438e7a100b", - ), - # TEST 2 - ( - generator_ed25519, - "4ccd089b28ff96da9db6c346ec114e0f" "5b8a319f35aba624da8cf6ed4fb8a6fb", - "3d4017c3e843895a92b70aa74d1b7ebc" "9c982ccf2ec4968cc0cd55f12af4660c", - "72", - "92a009a9f0d4cab8720e820b5f642540" - "a2b27b5416503f8fb3762223ebdb69da" - "085ac1e43e15996e458f3613d0f11d8c" - "387b2eaeb4302aeeb00d291612bb0c00", - ), - # TEST 3 - ( - generator_ed25519, - "c5aa8df43f9f837bedb7442f31dcb7b1" "66d38535076f094b85ce3a2e0b4458f7", - "fc51cd8e6218a1a38da47ed00230f058" "0816ed13ba3303ac5deb911548908025", - "af82", - "6291d657deec24024827e69c3abe01a3" - "0ce548a284743a445e3680d7db5ac3ac" - "18ff9b538d16f290ae67f760984dc659" - "4a7c15e9716ed28dc027beceea1ec40a", - ), - # TEST 1024 - ( - generator_ed25519, - "f5e5767cf153319517630f226876b86c" "8160cc583bc013744c6bf255f5cc0ee5", - "278117fc144c72340f67d0f2316e8386" "ceffbf2b2428c9c51fef7c597f1d426e", - "08b8b2b733424243760fe426a4b54908" - "632110a66c2f6591eabd3345e3e4eb98" - "fa6e264bf09efe12ee50f8f54e9f77b1" - "e355f6c50544e23fb1433ddf73be84d8" - "79de7c0046dc4996d9e773f4bc9efe57" - "38829adb26c81b37c93a1b270b20329d" - "658675fc6ea534e0810a4432826bf58c" - "941efb65d57a338bbd2e26640f89ffbc" - "1a858efcb8550ee3a5e1998bd177e93a" - "7363c344fe6b199ee5d02e82d522c4fe" - "ba15452f80288a821a579116ec6dad2b" - "3b310da903401aa62100ab5d1a36553e" - "06203b33890cc9b832f79ef80560ccb9" - "a39ce767967ed628c6ad573cb116dbef" - "efd75499da96bd68a8a97b928a8bbc10" - "3b6621fcde2beca1231d206be6cd9ec7" - "aff6f6c94fcd7204ed3455c68c83f4a4" - "1da4af2b74ef5c53f1d8ac70bdcb7ed1" - "85ce81bd84359d44254d95629e9855a9" - "4a7c1958d1f8ada5d0532ed8a5aa3fb2" - "d17ba70eb6248e594e1a2297acbbb39d" - "502f1a8c6eb6f1ce22b3de1a1f40cc24" - "554119a831a9aad6079cad88425de6bd" - "e1a9187ebb6092cf67bf2b13fd65f270" - "88d78b7e883c8759d2c4f5c65adb7553" - "878ad575f9fad878e80a0c9ba63bcbcc" - 
"2732e69485bbc9c90bfbd62481d9089b" - "eccf80cfe2df16a2cf65bd92dd597b07" - "07e0917af48bbb75fed413d238f5555a" - "7a569d80c3414a8d0859dc65a46128ba" - "b27af87a71314f318c782b23ebfe808b" - "82b0ce26401d2e22f04d83d1255dc51a" - "ddd3b75a2b1ae0784504df543af8969b" - "e3ea7082ff7fc9888c144da2af58429e" - "c96031dbcad3dad9af0dcbaaaf268cb8" - "fcffead94f3c7ca495e056a9b47acdb7" - "51fb73e666c6c655ade8297297d07ad1" - "ba5e43f1bca32301651339e22904cc8c" - "42f58c30c04aafdb038dda0847dd988d" - "cda6f3bfd15c4b4c4525004aa06eeff8" - "ca61783aacec57fb3d1f92b0fe2fd1a8" - "5f6724517b65e614ad6808d6f6ee34df" - "f7310fdc82aebfd904b01e1dc54b2927" - "094b2db68d6f903b68401adebf5a7e08" - "d78ff4ef5d63653a65040cf9bfd4aca7" - "984a74d37145986780fc0b16ac451649" - "de6188a7dbdf191f64b5fc5e2ab47b57" - "f7f7276cd419c17a3ca8e1b939ae49e4" - "88acba6b965610b5480109c8b17b80e1" - "b7b750dfc7598d5d5011fd2dcc5600a3" - "2ef5b52a1ecc820e308aa342721aac09" - "43bf6686b64b2579376504ccc493d97e" - "6aed3fb0f9cd71a43dd497f01f17c0e2" - "cb3797aa2a2f256656168e6c496afc5f" - "b93246f6b1116398a346f1a641f3b041" - "e989f7914f90cc2c7fff357876e506b5" - "0d334ba77c225bc307ba537152f3f161" - "0e4eafe595f6d9d90d11faa933a15ef1" - "369546868a7f3a45a96768d40fd9d034" - "12c091c6315cf4fde7cb68606937380d" - "b2eaaa707b4c4185c32eddcdd306705e" - "4dc1ffc872eeee475a64dfac86aba41c" - "0618983f8741c5ef68d3a101e8a3b8ca" - "c60c905c15fc910840b94c00a0b9d0", - "0aab4c900501b3e24d7cdf4663326a3a" - "87df5e4843b2cbdb67cbf6e460fec350" - "aa5371b1508f9f4528ecea23c436d94b" - "5e8fcd4f681e30a6ac00a9704a188a03", - ), - # TEST SHA(abc) - ( - generator_ed25519, - "833fe62409237b9d62ec77587520911e" "9a759cec1d19755b7da901b96dca3d42", - "ec172b93ad5e563bf4932c70e1245034" "c35467ef2efd4d64ebf819683467e2bf", - "ddaf35a193617abacc417349ae204131" - "12e6fa4e89a97ea20a9eeee64b55d39a" - "2192992a274fc1a836ba3c23a3feebbd" - "454d4423643ce80e2a9ac94fa54ca49f", - "dc2a4459e7369633a52b1bf277839a00" - "201009a3efbf3ecb69bea2186c26b589" - "09351fc9ac90b3ecfdfbc7c66431e030" - "3dca179c138ac17ad9bef1177331a704", - ), - # Blank - ( - generator_ed448, - "6c82a562cb808d10d632be89c8513ebf" - "6c929f34ddfa8c9f63c9960ef6e348a3" - "528c8a3fcc2f044e39a3fc5b94492f8f" - "032e7549a20098f95b", - "5fd7449b59b461fd2ce787ec616ad46a" - "1da1342485a70e1f8a0ea75d80e96778" - "edf124769b46c7061bd6783df1e50f6c" - "d1fa1abeafe8256180", - "", - "533a37f6bbe457251f023c0d88f976ae" - "2dfb504a843e34d2074fd823d41a591f" - "2b233f034f628281f2fd7a22ddd47d78" - "28c59bd0a21bfd3980ff0d2028d4b18a" - "9df63e006c5d1c2d345b925d8dc00b41" - "04852db99ac5c7cdda8530a113a0f4db" - "b61149f05a7363268c71d95808ff2e65" - "2600", - ), - # 1 octet - ( - generator_ed448, - "c4eab05d357007c632f3dbb48489924d" - "552b08fe0c353a0d4a1f00acda2c463a" - "fbea67c5e8d2877c5e3bc397a659949e" - "f8021e954e0a12274e", - "43ba28f430cdff456ae531545f7ecd0a" - "c834a55d9358c0372bfa0c6c6798c086" - "6aea01eb00742802b8438ea4cb82169c" - "235160627b4c3a9480", - "03", - "26b8f91727bd62897af15e41eb43c377" - "efb9c610d48f2335cb0bd0087810f435" - "2541b143c4b981b7e18f62de8ccdf633" - "fc1bf037ab7cd779805e0dbcc0aae1cb" - "cee1afb2e027df36bc04dcecbf154336" - "c19f0af7e0a6472905e799f1953d2a0f" - "f3348ab21aa4adafd1d234441cf807c0" - "3a00", - ), - # 11 octets - ( - generator_ed448, - "cd23d24f714274e744343237b93290f5" - "11f6425f98e64459ff203e8985083ffd" - "f60500553abc0e05cd02184bdb89c4cc" - "d67e187951267eb328", - "dcea9e78f35a1bf3499a831b10b86c90" - "aac01cd84b67a0109b55a36e9328b1e3" - "65fce161d71ce7131a543ea4cb5f7e9f" - "1d8b00696447001400", - "0c3e544074ec63b0265e0c", 
- "1f0a8888ce25e8d458a21130879b840a" - "9089d999aaba039eaf3e3afa090a09d3" - "89dba82c4ff2ae8ac5cdfb7c55e94d5d" - "961a29fe0109941e00b8dbdeea6d3b05" - "1068df7254c0cdc129cbe62db2dc957d" - "bb47b51fd3f213fb8698f064774250a5" - "028961c9bf8ffd973fe5d5c206492b14" - "0e00", - ), - # 12 octets - ( - generator_ed448, - "258cdd4ada32ed9c9ff54e63756ae582" - "fb8fab2ac721f2c8e676a72768513d93" - "9f63dddb55609133f29adf86ec9929dc" - "cb52c1c5fd2ff7e21b", - "3ba16da0c6f2cc1f30187740756f5e79" - "8d6bc5fc015d7c63cc9510ee3fd44adc" - "24d8e968b6e46e6f94d19b945361726b" - "d75e149ef09817f580", - "64a65f3cdedcdd66811e2915", - "7eeeab7c4e50fb799b418ee5e3197ff6" - "bf15d43a14c34389b59dd1a7b1b85b4a" - "e90438aca634bea45e3a2695f1270f07" - "fdcdf7c62b8efeaf00b45c2c96ba457e" - "b1a8bf075a3db28e5c24f6b923ed4ad7" - "47c3c9e03c7079efb87cb110d3a99861" - "e72003cbae6d6b8b827e4e6c143064ff" - "3c00", - ), - # 13 octets - ( - generator_ed448, - "7ef4e84544236752fbb56b8f31a23a10" - "e42814f5f55ca037cdcc11c64c9a3b29" - "49c1bb60700314611732a6c2fea98eeb" - "c0266a11a93970100e", - "b3da079b0aa493a5772029f0467baebe" - "e5a8112d9d3a22532361da294f7bb381" - "5c5dc59e176b4d9f381ca0938e13c6c0" - "7b174be65dfa578e80", - "64a65f3cdedcdd66811e2915e7", - "6a12066f55331b6c22acd5d5bfc5d712" - "28fbda80ae8dec26bdd306743c5027cb" - "4890810c162c027468675ecf645a8317" - "6c0d7323a2ccde2d80efe5a1268e8aca" - "1d6fbc194d3f77c44986eb4ab4177919" - "ad8bec33eb47bbb5fc6e28196fd1caf5" - "6b4e7e0ba5519234d047155ac727a105" - "3100", - ), - # 64 octets - ( - generator_ed448, - "d65df341ad13e008567688baedda8e9d" - "cdc17dc024974ea5b4227b6530e339bf" - "f21f99e68ca6968f3cca6dfe0fb9f4fa" - "b4fa135d5542ea3f01", - "df9705f58edbab802c7f8363cfe5560a" - "b1c6132c20a9f1dd163483a26f8ac53a" - "39d6808bf4a1dfbd261b099bb03b3fb5" - "0906cb28bd8a081f00", - "bd0f6a3747cd561bdddf4640a332461a" - "4a30a12a434cd0bf40d766d9c6d458e5" - "512204a30c17d1f50b5079631f64eb31" - "12182da3005835461113718d1a5ef944", - "554bc2480860b49eab8532d2a533b7d5" - "78ef473eeb58c98bb2d0e1ce488a98b1" - "8dfde9b9b90775e67f47d4a1c3482058" - "efc9f40d2ca033a0801b63d45b3b722e" - "f552bad3b4ccb667da350192b61c508c" - "f7b6b5adadc2c8d9a446ef003fb05cba" - "5f30e88e36ec2703b349ca229c267083" - "3900", - ), - # 256 octets - ( - generator_ed448, - "2ec5fe3c17045abdb136a5e6a913e32a" - "b75ae68b53d2fc149b77e504132d3756" - "9b7e766ba74a19bd6162343a21c8590a" - "a9cebca9014c636df5", - "79756f014dcfe2079f5dd9e718be4171" - "e2ef2486a08f25186f6bff43a9936b9b" - "fe12402b08ae65798a3d81e22e9ec80e" - "7690862ef3d4ed3a00", - "15777532b0bdd0d1389f636c5f6b9ba7" - "34c90af572877e2d272dd078aa1e567c" - "fa80e12928bb542330e8409f31745041" - "07ecd5efac61ae7504dabe2a602ede89" - "e5cca6257a7c77e27a702b3ae39fc769" - "fc54f2395ae6a1178cab4738e543072f" - "c1c177fe71e92e25bf03e4ecb72f47b6" - "4d0465aaea4c7fad372536c8ba516a60" - "39c3c2a39f0e4d832be432dfa9a706a6" - "e5c7e19f397964ca4258002f7c0541b5" - "90316dbc5622b6b2a6fe7a4abffd9610" - "5eca76ea7b98816af0748c10df048ce0" - "12d901015a51f189f3888145c03650aa" - "23ce894c3bd889e030d565071c59f409" - "a9981b51878fd6fc110624dcbcde0bf7" - "a69ccce38fabdf86f3bef6044819de11", - "c650ddbb0601c19ca11439e1640dd931" - "f43c518ea5bea70d3dcde5f4191fe53f" - "00cf966546b72bcc7d58be2b9badef28" - "743954e3a44a23f880e8d4f1cfce2d7a" - "61452d26da05896f0a50da66a239a8a1" - "88b6d825b3305ad77b73fbac0836ecc6" - "0987fd08527c1a8e80d5823e65cafe2a" - "3d00", - ), - # 1023 octets - ( - generator_ed448, - "872d093780f5d3730df7c212664b37b8" - "a0f24f56810daa8382cd4fa3f77634ec" - "44dc54f1c2ed9bea86fafb7632d8be19" - 
"9ea165f5ad55dd9ce8", - "a81b2e8a70a5ac94ffdbcc9badfc3feb" - "0801f258578bb114ad44ece1ec0e799d" - "a08effb81c5d685c0c56f64eecaef8cd" - "f11cc38737838cf400", - "6ddf802e1aae4986935f7f981ba3f035" - "1d6273c0a0c22c9c0e8339168e675412" - "a3debfaf435ed651558007db4384b650" - "fcc07e3b586a27a4f7a00ac8a6fec2cd" - "86ae4bf1570c41e6a40c931db27b2faa" - "15a8cedd52cff7362c4e6e23daec0fbc" - "3a79b6806e316efcc7b68119bf46bc76" - "a26067a53f296dafdbdc11c77f7777e9" - "72660cf4b6a9b369a6665f02e0cc9b6e" - "dfad136b4fabe723d2813db3136cfde9" - "b6d044322fee2947952e031b73ab5c60" - "3349b307bdc27bc6cb8b8bbd7bd32321" - "9b8033a581b59eadebb09b3c4f3d2277" - "d4f0343624acc817804728b25ab79717" - "2b4c5c21a22f9c7839d64300232eb66e" - "53f31c723fa37fe387c7d3e50bdf9813" - "a30e5bb12cf4cd930c40cfb4e1fc6225" - "92a49588794494d56d24ea4b40c89fc0" - "596cc9ebb961c8cb10adde976a5d602b" - "1c3f85b9b9a001ed3c6a4d3b1437f520" - "96cd1956d042a597d561a596ecd3d173" - "5a8d570ea0ec27225a2c4aaff26306d1" - "526c1af3ca6d9cf5a2c98f47e1c46db9" - "a33234cfd4d81f2c98538a09ebe76998" - "d0d8fd25997c7d255c6d66ece6fa56f1" - "1144950f027795e653008f4bd7ca2dee" - "85d8e90f3dc315130ce2a00375a318c7" - "c3d97be2c8ce5b6db41a6254ff264fa6" - "155baee3b0773c0f497c573f19bb4f42" - "40281f0b1f4f7be857a4e59d416c06b4" - "c50fa09e1810ddc6b1467baeac5a3668" - "d11b6ecaa901440016f389f80acc4db9" - "77025e7f5924388c7e340a732e554440" - "e76570f8dd71b7d640b3450d1fd5f041" - "0a18f9a3494f707c717b79b4bf75c984" - "00b096b21653b5d217cf3565c9597456" - "f70703497a078763829bc01bb1cbc8fa" - "04eadc9a6e3f6699587a9e75c94e5bab" - "0036e0b2e711392cff0047d0d6b05bd2" - "a588bc109718954259f1d86678a579a3" - "120f19cfb2963f177aeb70f2d4844826" - "262e51b80271272068ef5b3856fa8535" - "aa2a88b2d41f2a0e2fda7624c2850272" - "ac4a2f561f8f2f7a318bfd5caf969614" - "9e4ac824ad3460538fdc25421beec2cc" - "6818162d06bbed0c40a387192349db67" - "a118bada6cd5ab0140ee273204f628aa" - "d1c135f770279a651e24d8c14d75a605" - "9d76b96a6fd857def5e0b354b27ab937" - "a5815d16b5fae407ff18222c6d1ed263" - "be68c95f32d908bd895cd76207ae7264" - "87567f9a67dad79abec316f683b17f2d" - "02bf07e0ac8b5bc6162cf94697b3c27c" - "d1fea49b27f23ba2901871962506520c" - "392da8b6ad0d99f7013fbc06c2c17a56" - "9500c8a7696481c1cd33e9b14e40b82e" - "79a5f5db82571ba97bae3ad3e0479515" - "bb0e2b0f3bfcd1fd33034efc6245eddd" - "7ee2086ddae2600d8ca73e214e8c2b0b" - "db2b047c6a464a562ed77b73d2d841c4" - "b34973551257713b753632efba348169" - "abc90a68f42611a40126d7cb21b58695" - "568186f7e569d2ff0f9e745d0487dd2e" - "b997cafc5abf9dd102e62ff66cba87", - "e301345a41a39a4d72fff8df69c98075" - "a0cc082b802fc9b2b6bc503f926b65bd" - "df7f4c8f1cb49f6396afc8a70abe6d8a" - "ef0db478d4c6b2970076c6a0484fe76d" - "76b3a97625d79f1ce240e7c576750d29" - "5528286f719b413de9ada3e8eb78ed57" - "3603ce30d8bb761785dc30dbc320869e" - "1a00", - ), -] - - -@pytest.mark.parametrize( - "generator,private_key,public_key,message,signature", - TEST_VECTORS, -) -def test_vectors(generator, private_key, public_key, message, signature): - private_key = a2b_hex(private_key) - public_key = a2b_hex(public_key) - message = a2b_hex(message) - signature = a2b_hex(signature) - - sig_key = PrivateKey(generator, private_key) - ver_key = PublicKey(generator, public_key) - - assert sig_key.public_key().public_key() == ver_key.public_key() - - gen_sig = sig_key.sign(message) - - assert gen_sig == signature - - assert ver_key.verify(message, signature) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py b/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py deleted file mode 
100644 index 864cf108..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_ellipticcurve.py +++ /dev/null @@ -1,294 +0,0 @@ -import pytest - -try: - import unittest2 as unittest -except ImportError: - import unittest -from hypothesis import given, settings -import hypothesis.strategies as st - -try: - from hypothesis import HealthCheck - - HC_PRESENT = True -except ImportError: # pragma: no cover - HC_PRESENT = False -from .numbertheory import inverse_mod -from .ellipticcurve import CurveFp, INFINITY, Point, CurveEdTw - - -HYP_SETTINGS = {} -if HC_PRESENT: # pragma: no branch - HYP_SETTINGS["suppress_health_check"] = [HealthCheck.too_slow] - HYP_SETTINGS["deadline"] = 5000 - - -# NIST Curve P-192: -p = 6277101735386680763835789423207666416083908700390324961279 -r = 6277101735386680763835789423176059013767194773182842284081 -# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 -# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 -b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 -Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 -Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 - -c192 = CurveFp(p, -3, b) -p192 = Point(c192, Gx, Gy, r) - -c_23 = CurveFp(23, 1, 1) -g_23 = Point(c_23, 13, 7, 7) - - -HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) -HYP_SLOW_SETTINGS["max_examples"] = 2 - - -@settings(**HYP_SLOW_SETTINGS) -@given(st.integers(min_value=1, max_value=r - 1)) -def test_p192_mult_tests(multiple): - inv_m = inverse_mod(multiple, r) - - p1 = p192 * multiple - assert p1 * inv_m == p192 - - -def add_n_times(point, n): - ret = INFINITY - i = 0 - while i <= n: - yield ret - ret = ret + point - i += 1 - - -# From X9.62 I.1 (p. 96): -@pytest.mark.parametrize( - "p, m, check", - [(g_23, n, exp) for n, exp in enumerate(add_n_times(g_23, 8))], - ids=["g_23 test with mult {0}".format(i) for i in range(9)], -) -def test_add_and_mult_equivalence(p, m, check): - assert p * m == check - - -class TestCurve(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.c_23 = CurveFp(23, 1, 1) - - def test_equality_curves(self): - self.assertEqual(self.c_23, CurveFp(23, 1, 1)) - - def test_inequality_curves(self): - c192 = CurveFp(p, -3, b) - self.assertNotEqual(self.c_23, c192) - - def test_inequality_curves_by_b_only(self): - a = CurveFp(23, 1, 0) - b = CurveFp(23, 1, 1) - self.assertNotEqual(a, b) - - def test_usability_in_a_hashed_collection_curves(self): - {self.c_23: None} - - def test_hashability_curves(self): - hash(self.c_23) - - def test_conflation_curves(self): - ne1, ne2, ne3 = CurveFp(24, 1, 1), CurveFp(23, 2, 1), CurveFp(23, 1, 2) - eq1, eq2, eq3 = CurveFp(23, 1, 1), CurveFp(23, 1, 1), self.c_23 - self.assertEqual(len(set((c_23, eq1, eq2, eq3))), 1) - self.assertEqual(len(set((c_23, ne1, ne2, ne3))), 4) - self.assertDictEqual({c_23: None}, {eq1: None}) - self.assertIn(eq2, {eq3: None}) - - def test___str__(self): - self.assertEqual(str(self.c_23), "CurveFp(p=23, a=1, b=1)") - - def test___str___with_cofactor(self): - c = CurveFp(23, 1, 1, 4) - self.assertEqual(str(c), "CurveFp(p=23, a=1, b=1, h=4)") - - -class TestCurveEdTw(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.c_23 = CurveEdTw(23, 1, 1) - - def test___str__(self): - self.assertEqual(str(self.c_23), "CurveEdTw(p=23, a=1, d=1)") - - def test___str___with_cofactor(self): - c = CurveEdTw(23, 1, 1, 4) - self.assertEqual(str(c), "CurveEdTw(p=23, a=1, d=1, h=4)") - - def test_usability_in_a_hashed_collection_curves(self): - {self.c_23: None} - - def test_hashability_curves(self): - hash(self.c_23) 
- - -class TestPoint(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.c_23 = CurveFp(23, 1, 1) - cls.g_23 = Point(cls.c_23, 13, 7, 7) - - p = 6277101735386680763835789423207666416083908700390324961279 - r = 6277101735386680763835789423176059013767194773182842284081 - # s = 0x3045ae6fc8422f64ed579528d38120eae12196d5 - # c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65 - b = 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1 - Gx = 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012 - Gy = 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 - - cls.c192 = CurveFp(p, -3, b) - cls.p192 = Point(cls.c192, Gx, Gy, r) - - def test_p192(self): - # Checking against some sample computations presented - # in X9.62: - d = 651056770906015076056810763456358567190100156695615665659 - Q = d * self.p192 - self.assertEqual( - Q.x(), 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5 - ) - - k = 6140507067065001063065065565667405560006161556565665656654 - R = k * self.p192 - self.assertEqual( - R.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD - ) - self.assertEqual( - R.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 - ) - - u1 = 2563697409189434185194736134579731015366492496392189760599 - u2 = 6266643813348617967186477710235785849136406323338782220568 - temp = u1 * self.p192 + u2 * Q - self.assertEqual( - temp.x(), 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD - ) - self.assertEqual( - temp.y(), 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835 - ) - - def test_double_infinity(self): - p1 = INFINITY - p3 = p1.double() - self.assertEqual(p1, p3) - self.assertEqual(p3.x(), p1.x()) - self.assertEqual(p3.y(), p3.y()) - - def test_double(self): - x1, y1, x3, y3 = (3, 10, 7, 12) - - p1 = Point(self.c_23, x1, y1) - p3 = p1.double() - self.assertEqual(p3.x(), x3) - self.assertEqual(p3.y(), y3) - - def test_double_to_infinity(self): - p1 = Point(self.c_23, 11, 20) - p2 = p1.double() - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2.double() - self.assertEqual(p3, INFINITY) - self.assertIs(p3, INFINITY) - - def test_add_self_to_infinity(self): - p1 = Point(self.c_23, 11, 20) - p2 = p1 + p1 - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2 + p2 - self.assertEqual(p3, INFINITY) - self.assertIs(p3, INFINITY) - - def test_mul_to_infinity(self): - p1 = Point(self.c_23, 11, 20) - p2 = p1 * 2 - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2 * 2 - self.assertEqual(p3, INFINITY) - self.assertIs(p3, INFINITY) - - def test_multiply(self): - x1, y1, m, x3, y3 = (3, 10, 2, 7, 12) - p1 = Point(self.c_23, x1, y1) - p3 = p1 * m - self.assertEqual(p3.x(), x3) - self.assertEqual(p3.y(), y3) - - # Trivial tests from X9.62 B.3: - def test_add(self): - """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3).""" - - x1, y1, x2, y2, x3, y3 = (3, 10, 9, 7, 17, 20) - p1 = Point(self.c_23, x1, y1) - p2 = Point(self.c_23, x2, y2) - p3 = p1 + p2 - self.assertEqual(p3.x(), x3) - self.assertEqual(p3.y(), y3) - - def test_add_as_double(self): - """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3).""" - - x1, y1, x2, y2, x3, y3 = (3, 10, 3, 10, 7, 12) - p1 = Point(self.c_23, x1, y1) - p2 = Point(self.c_23, x2, y2) - p3 = p1 + p2 - self.assertEqual(p3.x(), x3) - self.assertEqual(p3.y(), y3) - - def test_equality_points(self): - self.assertEqual(self.g_23, Point(self.c_23, 13, 7, 7)) - - def test_inequality_points(self): - c = CurveFp(100, -3, 100) - p = Point(c, 100, 100, 
100) - self.assertNotEqual(self.g_23, p) - - def test_inequality_points_diff_types(self): - c = CurveFp(100, -3, 100) - self.assertNotEqual(self.g_23, c) - - def test_inequality_diff_y(self): - p1 = Point(self.c_23, 6, 4) - p2 = Point(self.c_23, 6, 19) - - self.assertNotEqual(p1, p2) - - def test_to_bytes_from_bytes(self): - p = Point(self.c_23, 3, 10) - - self.assertEqual(p, Point.from_bytes(self.c_23, p.to_bytes())) - - def test_add_to_neg_self(self): - p = Point(self.c_23, 3, 10) - - self.assertEqual(INFINITY, p + (-p)) - - def test_add_to_infinity(self): - p = Point(self.c_23, 3, 10) - - self.assertIs(p, p + INFINITY) - - def test_mul_infinity_by_scalar(self): - self.assertIs(INFINITY, INFINITY * 10) - - def test_mul_by_negative(self): - p = Point(self.c_23, 3, 10) - - self.assertEqual(p * -5, (-p) * 5) - - def test_str_infinity(self): - self.assertEqual(str(INFINITY), "infinity") - - def test_str_point(self): - p = Point(self.c_23, 3, 10) - - self.assertEqual(str(p), "(3,10)") diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py b/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py deleted file mode 100644 index f811b922..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_jacobi.py +++ /dev/null @@ -1,934 +0,0 @@ -import pickle -import sys - -try: - import unittest2 as unittest -except ImportError: - import unittest - -import os -import signal -import pytest -import threading -import platform -import hypothesis.strategies as st -from hypothesis import given, assume, settings, example - -from .ellipticcurve import CurveFp, PointJacobi, INFINITY, Point -from .ecdsa import ( - generator_256, - curve_256, - generator_224, - generator_brainpoolp160r1, - curve_brainpoolp160r1, - generator_112r2, - curve_112r2, -) -from .numbertheory import inverse_mod -from .util import randrange - - -NO_OLD_SETTINGS = {} -if sys.version_info > (2, 7): # pragma: no branch - NO_OLD_SETTINGS["deadline"] = 5000 - - -SLOW_SETTINGS = {} -if "--fast" in sys.argv: # pragma: no cover - SLOW_SETTINGS["max_examples"] = 2 -else: - SLOW_SETTINGS["max_examples"] = 10 - - -class TestJacobi(unittest.TestCase): - def test___init__(self): - curve = object() - x = 2 - y = 3 - z = 1 - order = 4 - pj = PointJacobi(curve, x, y, z, order) - - self.assertEqual(pj.order(), order) - self.assertIs(pj.curve(), curve) - self.assertEqual(pj.x(), x) - self.assertEqual(pj.y(), y) - - def test_add_with_different_curves(self): - p_a = PointJacobi.from_affine(generator_256) - p_b = PointJacobi.from_affine(generator_224) - - with self.assertRaises(ValueError): # pragma: no branch - p_a + p_b - - def test_compare_different_curves(self): - self.assertNotEqual(generator_256, generator_224) - - def test_equality_with_non_point(self): - pj = PointJacobi.from_affine(generator_256) - - self.assertNotEqual(pj, "value") - - def test_conversion(self): - pj = PointJacobi.from_affine(generator_256) - pw = pj.to_affine() - - self.assertEqual(generator_256, pw) - - def test_single_double(self): - pj = PointJacobi.from_affine(generator_256) - pw = generator_256.double() - - pj = pj.double() - - self.assertEqual(pj.x(), pw.x()) - self.assertEqual(pj.y(), pw.y()) - - def test_double_with_zero_point(self): - pj = PointJacobi(curve_256, 0, 0, 1) - - pj = pj.double() - - self.assertIs(pj, INFINITY) - - def test_double_with_zero_equivalent_point(self): - pj = PointJacobi(curve_256, 0, 0, 0) - - pj = pj.double() - - self.assertIs(pj, INFINITY) - - def test_double_with_zero_equivalent_point_non_zero_z_non_zero_y(self): - pj = 
PointJacobi(curve_256, 0, 1, curve_256.p()) - - pj = pj.double() - - self.assertIs(pj, INFINITY) - - def test_double_with_zero_equivalent_point_non_zero_z(self): - pj = PointJacobi(curve_256, 0, 0, curve_256.p()) - - pj = pj.double() - - self.assertIs(pj, INFINITY) - - def test_compare_with_affine_point(self): - pj = PointJacobi.from_affine(generator_256) - pa = pj.to_affine() - - self.assertEqual(pj, pa) - self.assertEqual(pa, pj) - - def test_to_affine_with_zero_point(self): - pj = PointJacobi(curve_256, 0, 0, 0) - - pa = pj.to_affine() - - self.assertIs(pa, INFINITY) - - def test_add_with_affine_point(self): - pj = PointJacobi.from_affine(generator_256) - pa = pj.to_affine() - - s = pj + pa - - self.assertEqual(s, pj.double()) - - def test_radd_with_affine_point(self): - pj = PointJacobi.from_affine(generator_256) - pa = pj.to_affine() - - s = pa + pj - - self.assertEqual(s, pj.double()) - - def test_add_with_infinity(self): - pj = PointJacobi.from_affine(generator_256) - - s = pj + INFINITY - - self.assertEqual(s, pj) - - def test_add_zero_point_to_affine(self): - pa = PointJacobi.from_affine(generator_256).to_affine() - pj = PointJacobi(curve_256, 0, 0, 0) - - s = pj + pa - - self.assertIs(s, pa) - - def test_multiply_by_zero(self): - pj = PointJacobi.from_affine(generator_256) - - pj = pj * 0 - - self.assertIs(pj, INFINITY) - - def test_zero_point_multiply_by_one(self): - pj = PointJacobi(curve_256, 0, 0, 1) - - pj = pj * 1 - - self.assertIs(pj, INFINITY) - - def test_multiply_by_one(self): - pj = PointJacobi.from_affine(generator_256) - pw = generator_256 * 1 - - pj = pj * 1 - - self.assertEqual(pj.x(), pw.x()) - self.assertEqual(pj.y(), pw.y()) - - def test_multiply_by_two(self): - pj = PointJacobi.from_affine(generator_256) - pw = generator_256 * 2 - - pj = pj * 2 - - self.assertEqual(pj.x(), pw.x()) - self.assertEqual(pj.y(), pw.y()) - - def test_rmul_by_two(self): - pj = PointJacobi.from_affine(generator_256) - pw = generator_256 * 2 - - pj = 2 * pj - - self.assertEqual(pj, pw) - - def test_compare_non_zero_with_infinity(self): - pj = PointJacobi.from_affine(generator_256) - - self.assertNotEqual(pj, INFINITY) - - def test_compare_non_zero_bad_scale_with_infinity(self): - pj = PointJacobi(curve_256, 1, 1, 0) - self.assertEqual(pj, INFINITY) - - def test_eq_x_0_on_curve_with_infinity(self): - c_23 = CurveFp(23, 1, 1) - pj = PointJacobi(c_23, 0, 1, 1) - - self.assertTrue(c_23.contains_point(0, 1)) - - self.assertNotEqual(pj, INFINITY) - - def test_eq_y_0_on_curve_with_infinity(self): - c_23 = CurveFp(23, 1, 1) - pj = PointJacobi(c_23, 4, 0, 1) - - self.assertTrue(c_23.contains_point(4, 0)) - - self.assertNotEqual(pj, INFINITY) - - def test_eq_with_same_x_different_y(self): - c_23 = CurveFp(23, 1, 1) - p_a = PointJacobi(c_23, 0, 22, 1) - p_b = PointJacobi(c_23, 0, 1, 1) - - self.assertNotEqual(p_a, p_b) - - def test_compare_zero_point_with_infinity(self): - pj = PointJacobi(curve_256, 0, 0, 0) - - self.assertEqual(pj, INFINITY) - - def test_compare_double_with_multiply(self): - pj = PointJacobi.from_affine(generator_256) - dbl = pj.double() - mlpl = pj * 2 - - self.assertEqual(dbl, mlpl) - - @settings(**SLOW_SETTINGS) - @given( - st.integers( - min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) - ) - ) - def test_multiplications(self, mul): - pj = PointJacobi.from_affine(generator_brainpoolp160r1) - pw = pj.to_affine() * mul - - pj = pj * mul - - self.assertEqual((pj.x(), pj.y()), (pw.x(), pw.y())) - self.assertEqual(pj, pw) - - @settings(**SLOW_SETTINGS) - @given( - 
st.integers( - min_value=0, max_value=int(generator_brainpoolp160r1.order() - 1) - ) - ) - @example(0) - @example(int(generator_brainpoolp160r1.order())) - def test_precompute(self, mul): - precomp = generator_brainpoolp160r1 - self.assertTrue(precomp._PointJacobi__precompute) - pj = PointJacobi.from_affine(generator_brainpoolp160r1) - - a = precomp * mul - b = pj * mul - - self.assertEqual(a, b) - - @settings(**SLOW_SETTINGS) - @given( - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - ) - @example(3, 3) - def test_add_scaled_points(self, a_mul, b_mul): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1) - a = PointJacobi.from_affine(j_g * a_mul) - b = PointJacobi.from_affine(j_g * b_mul) - - c = a + b - - self.assertEqual(c, j_g * (a_mul + b_mul)) - - @settings(**SLOW_SETTINGS) - @given( - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), - ) - def test_add_one_scaled_point(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1) - a = PointJacobi.from_affine(j_g * a_mul) - b = PointJacobi.from_affine(j_g * b_mul) - - p = curve_brainpoolp160r1.p() - - assume(inverse_mod(new_z, p)) - - new_zz = new_z * new_z % p - - b = PointJacobi( - curve_brainpoolp160r1, - b.x() * new_zz % p, - b.y() * new_zz * new_z % p, - new_z, - ) - - c = a + b - - self.assertEqual(c, j_g * (a_mul + b_mul)) - - @pytest.mark.slow - @settings(**SLOW_SETTINGS) - @given( - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers(min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1)), - ) - @example(1, 1, 1) - @example(3, 3, 3) - @example(2, int(generator_brainpoolp160r1.order() - 2), 1) - @example(2, int(generator_brainpoolp160r1.order() - 2), 3) - def test_add_same_scale_points(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1) - a = PointJacobi.from_affine(j_g * a_mul) - b = PointJacobi.from_affine(j_g * b_mul) - - p = curve_brainpoolp160r1.p() - - assume(inverse_mod(new_z, p)) - - new_zz = new_z * new_z % p - - a = PointJacobi( - curve_brainpoolp160r1, - a.x() * new_zz % p, - a.y() * new_zz * new_z % p, - new_z, - ) - b = PointJacobi( - curve_brainpoolp160r1, - b.x() * new_zz % p, - b.y() * new_zz * new_z % p, - new_z, - ) - - c = a + b - - self.assertEqual(c, j_g * (a_mul + b_mul)) - - def test_add_same_scale_points_static(self): - j_g = generator_brainpoolp160r1 - p = curve_brainpoolp160r1.p() - a = j_g * 11 - a.scale() - z1 = 13 - x = PointJacobi( - curve_brainpoolp160r1, - a.x() * z1**2 % p, - a.y() * z1**3 % p, - z1, - ) - y = PointJacobi( - curve_brainpoolp160r1, - a.x() * z1**2 % p, - a.y() * z1**3 % p, - z1, - ) - - c = a + a - - self.assertEqual(c, x + y) - - @pytest.mark.slow - @settings(**SLOW_SETTINGS) - @given( - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.integers( - min_value=1, max_value=int(generator_brainpoolp160r1.order() - 1) - ), - st.lists( - st.integers( - min_value=1, max_value=int(curve_brainpoolp160r1.p() - 1) - ), - min_size=2, - max_size=2, - unique=True, - ), - ) - @example(2, 2, [2, 1]) - @example(2, 2, [2, 3]) - 
@example(2, int(generator_brainpoolp160r1.order() - 2), [2, 3]) - @example(2, int(generator_brainpoolp160r1.order() - 2), [2, 1]) - def test_add_different_scale_points(self, a_mul, b_mul, new_z): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1) - a = PointJacobi.from_affine(j_g * a_mul) - b = PointJacobi.from_affine(j_g * b_mul) - - p = curve_brainpoolp160r1.p() - - assume(inverse_mod(new_z[0], p)) - assume(inverse_mod(new_z[1], p)) - - new_zz0 = new_z[0] * new_z[0] % p - new_zz1 = new_z[1] * new_z[1] % p - - a = PointJacobi( - curve_brainpoolp160r1, - a.x() * new_zz0 % p, - a.y() * new_zz0 * new_z[0] % p, - new_z[0], - ) - b = PointJacobi( - curve_brainpoolp160r1, - b.x() * new_zz1 % p, - b.y() * new_zz1 * new_z[1] % p, - new_z[1], - ) - - c = a + b - - self.assertEqual(c, j_g * (a_mul + b_mul)) - - def test_add_different_scale_points_static(self): - j_g = generator_brainpoolp160r1 - p = curve_brainpoolp160r1.p() - a = j_g * 11 - a.scale() - z1 = 13 - x = PointJacobi( - curve_brainpoolp160r1, - a.x() * z1**2 % p, - a.y() * z1**3 % p, - z1, - ) - z2 = 29 - y = PointJacobi( - curve_brainpoolp160r1, - a.x() * z2**2 % p, - a.y() * z2**3 % p, - z2, - ) - - c = a + a - - self.assertEqual(c, x + y) - - def test_add_different_points_same_scale_static(self): - j_g = generator_brainpoolp160r1 - p = curve_brainpoolp160r1.p() - a = j_g * 11 - a.scale() - b = j_g * 12 - z = 13 - x = PointJacobi( - curve_brainpoolp160r1, - a.x() * z**2 % p, - a.y() * z**3 % p, - z, - ) - y = PointJacobi( - curve_brainpoolp160r1, - b.x() * z**2 % p, - b.y() * z**3 % p, - z, - ) - - c = a + b - - self.assertEqual(c, x + y) - - def test_add_same_point_different_scale_second_z_1_static(self): - j_g = generator_112r2 - p = curve_112r2.p() - z = 11 - a = j_g * z - a.scale() - - x = PointJacobi( - curve_112r2, - a.x() * z**2 % p, - a.y() * z**3 % p, - z, - ) - y = PointJacobi( - curve_112r2, - a.x(), - a.y(), - 1, - ) - - c = a + a - - self.assertEqual(c, x + y) - - def test_add_to_infinity_static(self): - j_g = generator_112r2 - - z = 11 - a = j_g * z - a.scale() - - b = -a - - x = PointJacobi( - curve_112r2, - a.x(), - a.y(), - 1, - ) - y = PointJacobi( - curve_112r2, - b.x(), - b.y(), - 1, - ) - - self.assertEqual(INFINITY, x + y) - - def test_add_point_3_times(self): - j_g = PointJacobi.from_affine(generator_256) - - self.assertEqual(j_g * 3, j_g + j_g + j_g) - - def test_mul_without_order(self): - j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) - - self.assertEqual(j_g * generator_256.order(), INFINITY) - - def test_mul_add_inf(self): - j_g = PointJacobi.from_affine(generator_256) - - self.assertEqual(j_g, j_g.mul_add(1, INFINITY, 1)) - - def test_mul_add_same(self): - j_g = PointJacobi.from_affine(generator_256) - - self.assertEqual(j_g * 2, j_g.mul_add(1, j_g, 1)) - - def test_mul_add_precompute(self): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) - b = PointJacobi.from_affine(j_g * 255, True) - - self.assertEqual(j_g * 256, j_g + b) - self.assertEqual(j_g * (5 + 255 * 7), j_g * 5 + b * 7) - self.assertEqual(j_g * (5 + 255 * 7), j_g.mul_add(5, b, 7)) - - def test_mul_add_precompute_large(self): - j_g = PointJacobi.from_affine(generator_brainpoolp160r1, True) - b = PointJacobi.from_affine(j_g * 255, True) - - self.assertEqual(j_g * 256, j_g + b) - self.assertEqual( - j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 - ) - self.assertEqual( - j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) - ) - - def test_mul_add_to_mul(self): - j_g = 
PointJacobi.from_affine(generator_256) - - a = j_g * 3 - b = j_g.mul_add(2, j_g, 1) - - self.assertEqual(a, b) - - def test_mul_add_differnt(self): - j_g = PointJacobi.from_affine(generator_256) - - w_a = j_g * 2 - - self.assertEqual(j_g.mul_add(1, w_a, 1), j_g * 3) - - def test_mul_add_slightly_different(self): - j_g = PointJacobi.from_affine(generator_256) - - w_a = j_g * 2 - w_b = j_g * 3 - - self.assertEqual(w_a.mul_add(1, w_b, 3), w_a * 1 + w_b * 3) - - def test_mul_add(self): - j_g = PointJacobi.from_affine(generator_256) - - w_a = generator_256 * 255 - w_b = generator_256 * (0xA8 * 0xF0) - j_b = j_g * 0xA8 - - ret = j_g.mul_add(255, j_b, 0xF0) - - self.assertEqual(ret.to_affine(), w_a + w_b) - - def test_mul_add_zero(self): - j_g = PointJacobi.from_affine(generator_256) - - w_a = generator_256 * 255 - w_b = generator_256 * (0 * 0xA8) - - j_b = j_g * 0xA8 - - ret = j_g.mul_add(255, j_b, 0) - - self.assertEqual(ret.to_affine(), w_a + w_b) - - def test_mul_add_large(self): - j_g = PointJacobi.from_affine(generator_256) - b = PointJacobi.from_affine(j_g * 255) - - self.assertEqual(j_g * 256, j_g + b) - self.assertEqual( - j_g * (0xFF00 + 255 * 0xF0F0), j_g * 0xFF00 + b * 0xF0F0 - ) - self.assertEqual( - j_g * (0xFF00 + 255 * 0xF0F0), j_g.mul_add(0xFF00, b, 0xF0F0) - ) - - def test_mul_add_with_infinity_as_result(self): - j_g = PointJacobi.from_affine(generator_256) - - order = generator_256.order() - - b = PointJacobi.from_affine(generator_256 * 256) - - self.assertEqual(j_g.mul_add(order % 256, b, order // 256), INFINITY) - - def test_mul_add_without_order(self): - j_g = PointJacobi(curve_256, generator_256.x(), generator_256.y(), 1) - - order = generator_256.order() - - w_b = generator_256 * 34 - w_b.scale() - - b = PointJacobi(curve_256, w_b.x(), w_b.y(), 1) - - self.assertEqual(j_g.mul_add(order % 34, b, order // 34), INFINITY) - - def test_mul_add_with_doubled_negation_of_itself(self): - j_g = PointJacobi.from_affine(generator_256 * 17) - - dbl_neg = 2 * (-j_g) - - self.assertEqual(j_g.mul_add(4, dbl_neg, 2), INFINITY) - - @given( - st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), - st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), - st.integers(min_value=0, max_value=int(generator_112r2.order() - 1)), - ) - @example(693, 2, 3293) # values that will hit all the conditions for NAF - def test_mul_add_random(self, mul1, mul2, mul3): - p_a = PointJacobi.from_affine(generator_112r2) - p_b = generator_112r2 * mul2 - - res = p_a.mul_add(mul1, p_b, mul3) - - self.assertEqual(res, p_a * mul1 + p_b * mul3) - - def test_equality(self): - pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) - pj2 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) - self.assertEqual(pj1, pj2) - - def test_equality_with_invalid_object(self): - j_g = PointJacobi.from_affine(generator_256) - - self.assertNotEqual(j_g, 12) - - def test_equality_with_wrong_curves(self): - p_a = PointJacobi.from_affine(generator_256) - p_b = PointJacobi.from_affine(generator_224) - - self.assertNotEqual(p_a, p_b) - - def test_add_with_point_at_infinity(self): - pj1 = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) - x, y, z = pj1._add(2, 3, 1, 5, 5, 0, 23) - - self.assertEqual((x, y, z), (2, 3, 1)) - - def test_double_to_infinity(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 11, 20, 1) - p2 = p.double() - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2.double() - self.assertEqual(p3, INFINITY) 
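# Aside, not a line from the deleted file: a standalone sketch of why the
# two assertions around this point hold.  (4, 0) lies on y^2 = x^3 + x + 1
# over GF(23) with y == 0, so it is its own negative; the tangent there is
# vertical and doubling it must yield the point at infinity.
from ecdsa.ellipticcurve import CurveFp, PointJacobi, INFINITY

c_23 = CurveFp(23, 1, 1)
p2 = PointJacobi(c_23, 11, 20, 1).double()  # same start as the test: (4, 0)
assert (p2.x(), p2.y()) == (4, 0)
assert p2.double() == INFINITY              # an order-2 point doubles to O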
- self.assertIs(p3, INFINITY) - - def test_double_to_x_0(self): - c_23_2 = CurveFp(23, 1, 2) - p = PointJacobi(c_23_2, 9, 2, 1) - p2 = p.double() - - self.assertEqual((p2.x(), p2.y()), (0, 18)) - - def test_mul_to_infinity(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 11, 20, 1) - p2 = p * 2 - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2 * 2 - self.assertEqual(p3, INFINITY) - self.assertIs(p3, INFINITY) - - def test_add_to_infinity(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 11, 20, 1) - p2 = p + p - self.assertEqual((p2.x(), p2.y()), (4, 0)) - self.assertNotEqual(p2, INFINITY) - p3 = p2 + p2 - self.assertEqual(p3, INFINITY) - self.assertIs(p3, INFINITY) - - def test_mul_to_x_0(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 9, 7, 1) - - p2 = p * 13 - self.assertEqual((p2.x(), p2.y()), (0, 22)) - - def test_mul_to_y_0(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 9, 7, 1) - - p2 = p * 14 - self.assertEqual((p2.x(), p2.y()), (4, 0)) - - def test_add_to_x_0(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 9, 7, 1) - - p2 = p * 12 + p - self.assertEqual((p2.x(), p2.y()), (0, 22)) - - def test_add_to_y_0(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 9, 7, 1) - - p2 = p * 13 + p - self.assertEqual((p2.x(), p2.y()), (4, 0)) - - def test_add_diff_z_to_infinity(self): - c_23 = CurveFp(23, 1, 1) - p = PointJacobi(c_23, 9, 7, 1) - - c = p * 20 + p * 8 - self.assertIs(c, INFINITY) - - def test_pickle(self): - pj = PointJacobi(curve=CurveFp(23, 1, 1, 1), x=2, y=3, z=1, order=1) - self.assertEqual(pickle.loads(pickle.dumps(pj)), pj) - - @pytest.mark.slow - @settings(**NO_OLD_SETTINGS) - @pytest.mark.skipif( - platform.python_implementation() == "PyPy", - reason="threading on PyPy breaks coverage", - ) - @given(st.integers(min_value=1, max_value=10)) - def test_multithreading(self, thread_num): # pragma: no cover - # ensure that generator's precomputation table is filled - generator_112r2 * 2 - - # create a fresh point that doesn't have a filled precomputation table - gen = generator_112r2 - gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) - - self.assertEqual(gen._PointJacobi__precompute, []) - - def runner(generator): - order = generator.order() - for _ in range(10): - generator * randrange(order) - - threads = [] - for _ in range(thread_num): - threads.append(threading.Thread(target=runner, args=(gen,))) - - for t in threads: - t.start() - - runner(gen) - - for t in threads: - t.join() - - self.assertEqual( - gen._PointJacobi__precompute, - generator_112r2._PointJacobi__precompute, - ) - - @pytest.mark.slow - @pytest.mark.skipif( - platform.system() == "Windows" - or platform.python_implementation() == "PyPy", - reason="there are no signals on Windows, and threading breaks coverage" - " on PyPy", - ) - def test_multithreading_with_interrupts(self): # pragma: no cover - thread_num = 10 - # ensure that generator's precomputation table is filled - generator_112r2 * 2 - - # create a fresh point that doesn't have a filled precomputation table - gen = generator_112r2 - gen = PointJacobi(gen.curve(), gen.x(), gen.y(), 1, gen.order(), True) - - self.assertEqual(gen._PointJacobi__precompute, []) - - def runner(generator): - order = generator.order() - for _ in range(50): - generator * randrange(order) - - def interrupter(barrier_start, barrier_end, lock_exit): - # wait until MainThread can handle KeyboardInterrupt - barrier_start.release() - barrier_end.acquire() - 
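# (Aside on the step below, not part of the deleted file: the Lock objects
# emulate one-shot events, presumably for portability to interpreters that
# predate threading.Barrier.  CPython delivers SIGINT to the main thread
# only, so the release/acquire pairing ensures the KeyboardInterrupt
# surfaces while the main thread is still inside its assertRaises block
# instead of escaping the test.)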
os.kill(os.getpid(), signal.SIGINT) - lock_exit.release() - - threads = [] - for _ in range(thread_num): - threads.append(threading.Thread(target=runner, args=(gen,))) - - barrier_start = threading.Lock() - barrier_start.acquire() - barrier_end = threading.Lock() - barrier_end.acquire() - lock_exit = threading.Lock() - lock_exit.acquire() - - threads.append( - threading.Thread( - target=interrupter, - args=(barrier_start, barrier_end, lock_exit), - ) - ) - - for t in threads: - t.start() - - with self.assertRaises(KeyboardInterrupt): - # signal to interrupter that we can now handle the signal - barrier_start.acquire() - barrier_end.release() - runner(gen) - # use the lock to ensure we never go past the scope of - # assertRaises before the os.kill is called - lock_exit.acquire() - - for t in threads: - t.join() - - self.assertEqual( - gen._PointJacobi__precompute, - generator_112r2._PointJacobi__precompute, - ) - - -class TestZeroCurve(unittest.TestCase): - """Tests with curve that has (0, 0) on the curve.""" - - def setUp(self): - self.curve = CurveFp(23, 1, 0) - - def test_zero_point_on_curve(self): - self.assertTrue(self.curve.contains_point(0, 0)) - - def test_double_to_0_0_point(self): - p = PointJacobi(self.curve, 1, 18, 1) - - d = p.double() - - self.assertNotEqual(d, INFINITY) - self.assertEqual((0, 0), (d.x(), d.y())) - - def test_double_to_0_0_point_with_non_one_z(self): - z = 2 - p = PointJacobi(self.curve, 1 * z**2, 18 * z**3, z) - - d = p.double() - - self.assertNotEqual(d, INFINITY) - self.assertEqual((0, 0), (d.x(), d.y())) - - def test_mul_to_0_0_point(self): - p = PointJacobi(self.curve, 11, 13, 1) - - d = p * 12 - - self.assertNotEqual(d, INFINITY) - self.assertEqual((0, 0), (d.x(), d.y())) - - def test_double_of_0_0_point(self): - p = PointJacobi(self.curve, 0, 0, 1) - - d = p.double() - - self.assertIs(d, INFINITY) - - def test_compare_to_old_implementation(self): - p = PointJacobi(self.curve, 11, 13, 1) - p_c = Point(self.curve, 11, 13) - - for i in range(24): - self.assertEqual(p * i, p_c * i) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_keys.py b/venv/lib/python3.12/site-packages/ecdsa/test_keys.py deleted file mode 100644 index 348475e2..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_keys.py +++ /dev/null @@ -1,1138 +0,0 @@ -try: - import unittest2 as unittest -except ImportError: - import unittest - -try: - buffer -except NameError: - buffer = memoryview - -import os -import array -import pytest -import hashlib - -from .keys import ( - VerifyingKey, - SigningKey, - MalformedPointError, - BadSignatureError, -) -from .der import ( - unpem, - UnexpectedDER, - encode_sequence, - encode_oid, - encode_bitstring, - encode_integer, - encode_octet_string, -) -from .util import ( - sigencode_string, - sigencode_der, - sigencode_strings, - sigdecode_string, - sigdecode_der, - sigdecode_strings, -) -from .curves import NIST256p, Curve, BRAINPOOLP160r1, Ed25519, Ed448 -from .ellipticcurve import Point, PointJacobi, CurveFp, INFINITY -from .ecdsa import generator_brainpoolp160r1 - - -class TestVerifyingKeyFromString(unittest.TestCase): - """ - Verify that ecdsa.keys.VerifyingKey.from_string() can be used with - bytes-like objects - """ - - @classmethod - def setUpClass(cls): - cls.key_bytes = ( - b"\x04L\xa2\x95\xdb\xc7Z\xd7\x1f\x93\nz\xcf\x97\xcf" - b"\xd7\xc2\xd9o\xfe8}X!\xae\xd4\xfah\xfa^\rpI\xba\xd1" - b"Y\xfb\x92xa\xebo+\x9cG\xfav\xca" - ) - cls.vk = VerifyingKey.from_string(cls.key_bytes) - - def test_bytes(self): - self.assertIsNotNone(self.vk) - 
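# Aside, not from the deleted file: context for the 48-byte fixture used by
# this class.  A raw public key is x || y with baselen bytes each, and
# from_string() assumes NIST192p when no curve is passed, which is why the
# fixture above needs no curve argument.  Self-contained sketch:
from ecdsa import SigningKey, VerifyingKey, NIST192p

raw = SigningKey.generate().verifying_key.to_string()  # NIST192p by default
assert len(raw) == 2 * NIST192p.baselen                # 48 bytes, x || y
vk = VerifyingKey.from_string(raw)                     # default curves match
assert vk.curve is NIST192p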
self.assertIsInstance(self.vk, VerifyingKey) - self.assertEqual( - self.vk.pubkey.point.x(), - 105419898848891948935835657980914000059957975659675736097, - ) - self.assertEqual( - self.vk.pubkey.point.y(), - 4286866841217412202667522375431381222214611213481632495306, - ) - - def test_bytes_memoryview(self): - vk = VerifyingKey.from_string(buffer(self.key_bytes)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytearray(self): - vk = VerifyingKey.from_string(bytearray(self.key_bytes)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytesarray_memoryview(self): - vk = VerifyingKey.from_string(buffer(bytearray(self.key_bytes))) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_bytes(self): - arr = array.array("B", self.key_bytes) - vk = VerifyingKey.from_string(arr) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_bytes_memoryview(self): - arr = array.array("B", self.key_bytes) - vk = VerifyingKey.from_string(buffer(arr)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_ints(self): - arr = array.array("I", self.key_bytes) - vk = VerifyingKey.from_string(arr) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_ints_memoryview(self): - arr = array.array("I", self.key_bytes) - vk = VerifyingKey.from_string(buffer(arr)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytes_uncompressed(self): - vk = VerifyingKey.from_string(b"\x04" + self.key_bytes) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytearray_uncompressed(self): - vk = VerifyingKey.from_string(bytearray(b"\x04" + self.key_bytes)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytes_compressed(self): - vk = VerifyingKey.from_string(b"\x02" + self.key_bytes[:24]) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytearray_compressed(self): - vk = VerifyingKey.from_string(bytearray(b"\x02" + self.key_bytes[:24])) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_ed25519_VerifyingKey_from_string_imported(self): - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"AAA", Ed25519) - - -class TestVerifyingKeyFromDer(unittest.TestCase): - """ - Verify that ecdsa.keys.VerifyingKey.from_der() can be used with - bytes-like objects. 
- """ - - @classmethod - def setUpClass(cls): - prv_key_str = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" - "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" - "bA==\n" - "-----END EC PRIVATE KEY-----\n" - ) - key_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MEkwEwYHKoZIzj0CAQYIKoZIzj0DAQEDMgAEuIF30ITvF/XkVjlAgCg2D59ZtKTX\n" - "Jk5i2gZR3OR6NaTFtFz1FZNCOotVe5wgmfNs\n" - "-----END PUBLIC KEY-----\n" - ) - cls.key_pem = key_str - - cls.key_bytes = unpem(key_str) - assert isinstance(cls.key_bytes, bytes) - cls.vk = VerifyingKey.from_pem(key_str) - cls.sk = SigningKey.from_pem(prv_key_str) - - key_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8\n" - "Poqzgjau4kfxBPyZimeRfuY/9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" - "-----END PUBLIC KEY-----\n" - ) - cls.vk2 = VerifyingKey.from_pem(key_str) - - cls.sk2 = SigningKey.generate(vk.curve) - - def test_load_key_with_explicit_parameters(self): - pub_key_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" - "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" - "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" - "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" - "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" - "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" - "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" - "-----END PUBLIC KEY-----" - ) - pk = VerifyingKey.from_pem(pub_key_str) - - pk_exp = VerifyingKey.from_string( - b"\x04\x8a\xf5\x52\x48\x18\xb3\x98\xe6\x6c\x57\x3b\x8a\xdd\x7f" - b"\x60\x8d\x97\x4f\xff\xc8\x95\xa1\x23\x30\xd6\x5f\xb7\x5a\x30" - b"\x8e\xaa\x41\x60\x7d\x84\xac\x91\xe4\xe1\x4e\x4f\xa7\x85\xaf" - b"\x5a\xad\x71\x98\x7c\x08\x66\x5b\x07\xec\x88\x38\x2a\x67\x9f" - b"\x09\xf4\x7a\x4a\x65", - curve=NIST256p, - ) - self.assertEqual(pk, pk_exp) - - def test_load_key_with_explicit_with_explicit_disabled(self): - pub_key_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA\n" - "AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA////\n" - "///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd\n" - "NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5\n" - "RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA\n" - "//////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABIr1UkgYs5jmbFc7it1/YI2X\n" - "T//IlaEjMNZft1owjqpBYH2ErJHk4U5Pp4WvWq1xmHwIZlsH7Ig4KmefCfR6SmU=\n" - "-----END PUBLIC KEY-----" - ) - with self.assertRaises(UnexpectedDER): - VerifyingKey.from_pem( - pub_key_str, valid_curve_encodings=["named_curve"] - ) - - def test_load_key_with_disabled_format(self): - with self.assertRaises(MalformedPointError) as e: - VerifyingKey.from_der(self.key_bytes, valid_encodings=["raw"]) - - self.assertIn("enabled (raw) encodings", str(e.exception)) - - def test_custom_hashfunc(self): - vk = VerifyingKey.from_der(self.key_bytes, hashlib.sha256) - - self.assertIs(vk.default_hashfunc, hashlib.sha256) - - def test_from_pem_with_custom_hashfunc(self): - vk = VerifyingKey.from_pem(self.key_pem, hashlib.sha256) - - self.assertIs(vk.default_hashfunc, hashlib.sha256) - - def test_bytes(self): - vk = VerifyingKey.from_der(self.key_bytes) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytes_memoryview(self): - vk = 
VerifyingKey.from_der(buffer(self.key_bytes)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytearray(self): - vk = VerifyingKey.from_der(bytearray(self.key_bytes)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_bytesarray_memoryview(self): - vk = VerifyingKey.from_der(buffer(bytearray(self.key_bytes))) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_bytes(self): - arr = array.array("B", self.key_bytes) - vk = VerifyingKey.from_der(arr) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_array_array_of_bytes_memoryview(self): - arr = array.array("B", self.key_bytes) - vk = VerifyingKey.from_der(buffer(arr)) - - self.assertEqual(self.vk.to_string(), vk.to_string()) - - def test_equality_on_verifying_keys(self): - self.assertTrue(self.vk == self.sk.get_verifying_key()) - - def test_inequality_on_verifying_keys(self): - self.assertFalse(self.vk == self.vk2) - - def test_inequality_on_verifying_keys_not_implemented(self): - self.assertFalse(self.vk == None) - - def test_VerifyingKey_inequality_on_same_curve(self): - self.assertNotEqual(self.vk, self.sk2.verifying_key) - - def test_SigningKey_inequality_on_same_curve(self): - self.assertNotEqual(self.sk, self.sk2) - - def test_inequality_on_wrong_types(self): - self.assertFalse(self.vk == self.sk) - - def test_from_public_point_old(self): - pj = self.vk.pubkey.point - point = Point(pj.curve(), pj.x(), pj.y()) - - vk = VerifyingKey.from_public_point(point, self.vk.curve) - - self.assertTrue(vk == self.vk) - - def test_ed25519_VerifyingKey_repr__(self): - sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) - string = repr(sk.verifying_key) - - self.assertEqual( - "VerifyingKey.from_string(" - "bytearray(b'K\\x0c\\xfbZH\\x8e\\x8c\\x8c\\x07\\xee\\xda\\xfb" - "\\xe1\\x97\\xcd\\x90\\x18\\x02\\x15h]\\xfe\\xbe\\xcbB\\xba\\xe6r" - "\\x10\\xae\\xf1P'), Ed25519, None)", - string, - ) - - def test_edwards_from_public_point(self): - point = Ed25519.generator - with self.assertRaises(ValueError) as e: - VerifyingKey.from_public_point(point, Ed25519) - - self.assertIn("incompatible with Edwards", str(e.exception)) - - def test_edwards_precompute_no_side_effect(self): - sk = SigningKey.from_string(Ed25519.generator.to_bytes(), Ed25519) - vk = sk.verifying_key - vk2 = VerifyingKey.from_string(vk.to_string(), Ed25519) - vk.precompute() - - self.assertEqual(vk, vk2) - - def test_parse_malfomed_eddsa_der_pubkey(self): - der_str = encode_sequence( - encode_sequence(encode_oid(*Ed25519.oid)), - encode_bitstring(bytes(Ed25519.generator.to_bytes()), 0), - encode_bitstring(b"\x00", 0), - ) - - with self.assertRaises(UnexpectedDER) as e: - VerifyingKey.from_der(der_str) - - self.assertIn("trailing junk after public key", str(e.exception)) - - def test_edwards_from_public_key_recovery(self): - with self.assertRaises(ValueError) as e: - VerifyingKey.from_public_key_recovery(b"", b"", Ed25519) - - self.assertIn("unsupported for Edwards", str(e.exception)) - - def test_edwards_from_public_key_recovery_with_digest(self): - with self.assertRaises(ValueError) as e: - VerifyingKey.from_public_key_recovery_with_digest( - b"", b"", Ed25519 - ) - - self.assertIn("unsupported for Edwards", str(e.exception)) - - def test_load_ed25519_from_pem(self): - vk_pem = ( - "-----BEGIN PUBLIC KEY-----\n" - "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" - "-----END PUBLIC KEY-----\n" - ) - - vk = VerifyingKey.from_pem(vk_pem) - - 
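# Aside, not from the deleted file: how the Ed25519 PEMs in these tests
# relate to the 32 raw key bytes (vk_str below).  The base64 body is a DER
# SubjectPublicKeyInfo: a fixed 12-byte header carrying OID 1.3.101.112,
# followed by the raw public key.  Sketch using the same PEM body:
import base64

body = "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U="
der = base64.b64decode(body)
assert len(der) == 12 + 32
assert der[:12].hex() == "302a300506032b6570032100"  # SPKI header, Ed25519
assert der[12:16] == b"\x23\x00\x50\xd0"             # first bytes of vk_str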
self.assertIsInstance(vk.curve, Curve) - self.assertIs(vk.curve, Ed25519) - - vk_str = ( - b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" - b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" - ) - - vk_2 = VerifyingKey.from_string(vk_str, Ed25519) - - self.assertEqual(vk, vk_2) - - def test_export_ed255_to_pem(self): - vk_str = ( - b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" - b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" - ) - - vk = VerifyingKey.from_string(vk_str, Ed25519) - - vk_pem = ( - b"-----BEGIN PUBLIC KEY-----\n" - b"MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" - b"-----END PUBLIC KEY-----\n" - ) - - self.assertEqual(vk_pem, vk.to_pem()) - - def test_export_ed255_to_ssh(self): - vk_str = ( - b"\x23\x00\x50\xd0\xd6\x64\x22\x28\x8e\xe3\x55\x89\x7e\x6e\x41\x57" - b"\x8d\xae\xde\x44\x26\xee\x56\x27\xbc\x85\xe6\x0b\x2f\x2a\xcb\x65" - ) - - vk = VerifyingKey.from_string(vk_str, Ed25519) - - vk_ssh = b"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstl\n" - - self.assertEqual(vk_ssh, vk.to_ssh()) - - def test_ed25519_export_import(self): - sk = SigningKey.generate(Ed25519) - vk = sk.verifying_key - - vk2 = VerifyingKey.from_pem(vk.to_pem()) - - self.assertEqual(vk, vk2) - - def test_ed25519_sig_verify(self): - vk_pem = ( - "-----BEGIN PUBLIC KEY-----\n" - "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" - "-----END PUBLIC KEY-----\n" - ) - - vk = VerifyingKey.from_pem(vk_pem) - - data = b"data\n" - - # signature created by OpenSSL 3.0.0 beta1 - sig = ( - b"\x64\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" - b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" - b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" - b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" - ) - - self.assertTrue(vk.verify(sig, data)) - - def test_ed25519_sig_verify_malformed(self): - vk_pem = ( - "-----BEGIN PUBLIC KEY-----\n" - "MCowBQYDK2VwAyEAIwBQ0NZkIiiO41WJfm5BV42u3kQm7lYnvIXmCy8qy2U=\n" - "-----END PUBLIC KEY-----\n" - ) - - vk = VerifyingKey.from_pem(vk_pem) - - data = b"data\n" - - # modified signature from test_ed25519_sig_verify - sig = ( - b"\xAA\x47\xab\x6a\x33\xcd\x79\x45\xad\x98\x11\x6c\xb9\xf2\x20\xeb" - b"\x90\xd6\x50\xe3\xc7\x8f\x9f\x60\x10\xec\x75\xe0\x2f\x27\xd3\x96" - b"\xda\xe8\x58\x7f\xe0\xfe\x46\x5c\x81\xef\x50\xec\x29\x9f\xae\xd5" - b"\xad\x46\x3c\x91\x68\x83\x4d\xea\x8d\xa8\x19\x04\x04\x79\x03\x0b" - ) - - with self.assertRaises(BadSignatureError): - vk.verify(sig, data) - - def test_ed448_from_pem(self): - pem_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" - "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" - "-----END PUBLIC KEY-----\n" - ) - - vk = VerifyingKey.from_pem(pem_str) - - self.assertIsInstance(vk.curve, Curve) - self.assertIs(vk.curve, Ed448) - - vk_str = ( - b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" - b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" - b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" - b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" - ) - - vk2 = VerifyingKey.from_string(vk_str, Ed448) - - self.assertEqual(vk, vk2) - - def test_ed448_to_pem(self): - vk_str = ( - b"\x79\x0b\x5e\xb5\x2b\xbb\x08\xc1\x33\x13\xe5\xd6\x07\x5d\x01\x83" - b"\x8e\xcb\x08\x0d\x20\x88\xd8\xa4\x3b\x11\xf3\x76\x9f\xad\x67\xf7" - 
b"\x8a\xfc\x49\xf4\x75\x37\x58\x0f\x69\x65\xf7\x88\x39\xf0\xc8\x52" - b"\x9c\x18\x81\x40\x1f\x40\xd4\xc3\x00" - ) - vk = VerifyingKey.from_string(vk_str, Ed448) - - vk_pem = ( - b"-----BEGIN PUBLIC KEY-----\n" - b"MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0dTdYD2ll94g5\n" - b"8MhSnBiBQB9A1MMA\n" - b"-----END PUBLIC KEY-----\n" - ) - - self.assertEqual(vk_pem, vk.to_pem()) - - def test_ed448_export_import(self): - sk = SigningKey.generate(Ed448) - vk = sk.verifying_key - - vk2 = VerifyingKey.from_pem(vk.to_pem()) - - self.assertEqual(vk, vk2) - - def test_ed448_sig_verify(self): - pem_str = ( - "-----BEGIN PUBLIC KEY-----\n" - "MEMwBQYDK2VxAzoAeQtetSu7CMEzE+XWB10Bg47LCA0giNikOxHzdp+tZ/eK/En0\n" - "dTdYD2ll94g58MhSnBiBQB9A1MMA\n" - "-----END PUBLIC KEY-----\n" - ) - - vk = VerifyingKey.from_pem(pem_str) - - data = b"data\n" - - # signature created by OpenSSL 3.0.0 beta1 - sig = ( - b"\x68\xed\x2c\x70\x35\x22\xca\x1c\x35\x03\xf3\xaa\x51\x33\x3d\x00" - b"\xc0\xae\xb0\x54\xc5\xdc\x7f\x6f\x30\x57\xb4\x1d\xcb\xe9\xec\xfa" - b"\xc8\x45\x3e\x51\xc1\xcb\x60\x02\x6a\xd0\x43\x11\x0b\x5f\x9b\xfa" - b"\x32\x88\xb2\x38\x6b\xed\xac\x09\x00\x78\xb1\x7b\x5d\x7e\xf8\x16" - b"\x31\xdd\x1b\x3f\x98\xa0\xce\x19\xe7\xd8\x1c\x9f\x30\xac\x2f\xd4" - b"\x1e\x55\xbf\x21\x98\xf6\x4c\x8c\xbe\x81\xa5\x2d\x80\x4c\x62\x53" - b"\x91\xd5\xee\x03\x30\xc6\x17\x66\x4b\x9e\x0c\x8d\x40\xd0\xad\xae" - b"\x0a\x00" - ) - - self.assertTrue(vk.verify(sig, data)) - - -class TestSigningKey(unittest.TestCase): - """ - Verify that ecdsa.keys.SigningKey.from_der() can be used with - bytes-like objects. - """ - - @classmethod - def setUpClass(cls): - prv_key_str = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" - "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" - "bA==\n" - "-----END EC PRIVATE KEY-----\n" - ) - cls.sk1 = SigningKey.from_pem(prv_key_str) - - prv_key_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MG8CAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQEEVTBTAgEBBBheyEIL1u+SUqlC6YkE\n" - "PKKfVh+lJXcOscWhNAMyAAS4gXfQhO8X9eRWOUCAKDYPn1m0pNcmTmLaBlHc5Ho1\n" - "pMW0XPUVk0I6i1V7nCCZ82w=\n" - "-----END PRIVATE KEY-----\n" - ) - cls.sk1_pkcs8 = SigningKey.from_pem(prv_key_str) - - prv_key_str = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MHcCAQEEIKlL2EAm5NPPZuXwxRf4nXMk0A80y6UUbiQ17be/qFhRoAoGCCqGSM49\n" - "AwEHoUQDQgAE4H3iRbG4TSrsSRb/gusPQB/4YcN8Poqzgjau4kfxBPyZimeRfuY/\n" - "9g/wMmPuhGl4BUve51DsnKJFRr8psk0ieA==\n" - "-----END EC PRIVATE KEY-----\n" - ) - cls.sk2 = SigningKey.from_pem(prv_key_str) - - def test_to_der_pkcs8(self): - self.assertEqual( - self.sk1.to_der(format="pkcs8"), - b"0o\x02\x01\x010\x13\x06\x07*\x86H\xce=\x02\x01\x06\x08*\x86H" - b"\xce=\x03\x01\x01\x04U0S\x02\x01\x01\x04\x18^\xc8B\x0b\xd6\xef" - b"\x92R\xa9B\xe9\x89\x04<\xa2\x9fV\x1f\xa5%w\x0e\xb1\xc5\xa14\x03" - b"2\x00\x04\xb8\x81w\xd0\x84\xef\x17\xf5\xe4V9@\x80(6\x0f\x9fY" - b"\xb4\xa4\xd7&Nb\xda\x06Q\xdc\xe4z5\xa4\xc5\xb4\\\xf5\x15\x93B:" - b"\x8bU{\x9c \x99\xf3l", - ) - - def test_decoding_explicit_curve_parameters(self): - prv_key_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" - "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" - "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" - "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" - "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" - 
"AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" - "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" - "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" - "-----END PRIVATE KEY-----\n" - ) - - sk = SigningKey.from_pem(prv_key_str) - - sk2 = SigningKey.from_string( - b"\x21\x7b\x51\x11\xf5\x26\x47\x5e\xab\x65\xb9\xaf\x0c\x60\xf6" - b"\x94\x01\x05\x3d\x96\xb6\x0c\xb3\xf2\xcf\x47\x33\x4a\x36\xb9" - b"\xf3\x20", - curve=NIST256p, - ) - - self.assertEqual(sk, sk2) - - def test_decoding_explicit_curve_parameters_with_explicit_disabled(self): - prv_key_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MIIBeQIBADCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAAB\n" - "AAAAAAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA\n" - "///////////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMV\n" - "AMSdNgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg\n" - "9KE5RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8A\n" - "AAAA//////////+85vqtpxeehPO5ysL8YyVRAgEBBG0wawIBAQQgIXtREfUmR16r\n" - "ZbmvDGD2lAEFPZa2DLPyz0czSja58yChRANCAASK9VJIGLOY5mxXO4rdf2CNl0//\n" - "yJWhIzDWX7daMI6qQWB9hKyR5OFOT6eFr1qtcZh8CGZbB+yIOCpnnwn0ekpl\n" - "-----END PRIVATE KEY-----\n" - ) - - with self.assertRaises(UnexpectedDER): - SigningKey.from_pem( - prv_key_str, valid_curve_encodings=["named_curve"] - ) - - def test_equality_on_signing_keys(self): - sk = SigningKey.from_secret_exponent( - self.sk1.privkey.secret_multiplier, self.sk1.curve - ) - self.assertEqual(self.sk1, sk) - self.assertEqual(self.sk1_pkcs8, sk) - - def test_verify_with_empty_message(self): - sig = self.sk1.sign(b"") - - self.assertTrue(sig) - - vk = self.sk1.verifying_key - - self.assertTrue(vk.verify(sig, b"")) - - def test_verify_with_precompute(self): - sig = self.sk1.sign(b"message") - - vk = self.sk1.verifying_key - - vk.precompute() - - self.assertTrue(vk.verify(sig, b"message")) - - def test_compare_verifying_key_with_precompute(self): - vk1 = self.sk1.verifying_key - vk1.precompute() - - vk2 = self.sk1_pkcs8.verifying_key - - self.assertEqual(vk1, vk2) - - def test_verify_with_lazy_precompute(self): - sig = self.sk2.sign(b"other message") - - vk = self.sk2.verifying_key - - vk.precompute(lazy=True) - - self.assertTrue(vk.verify(sig, b"other message")) - - def test_inequality_on_signing_keys(self): - self.assertNotEqual(self.sk1, self.sk2) - - def test_inequality_on_signing_keys_not_implemented(self): - self.assertNotEqual(self.sk1, None) - - def test_ed25519_from_pem(self): - pem_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" - "-----END PRIVATE KEY-----\n" - ) - - sk = SigningKey.from_pem(pem_str) - - sk_str = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - - self.assertEqual(sk, sk_str) - - def test_ed25519_from_der_bad_alg_id_params(self): - der_str = encode_sequence( - encode_integer(1), - encode_sequence(encode_oid(*Ed25519.oid), encode_integer(1)), - encode_octet_string(encode_octet_string(b"A" * 32)), - ) - - with self.assertRaises(UnexpectedDER) as e: - SigningKey.from_der(der_str) - - self.assertIn("Non NULL parameters", str(e.exception)) - - def test_ed25519_from_der_junk_after_priv_key(self): - der_str = encode_sequence( - encode_integer(1), - encode_sequence( - encode_oid(*Ed25519.oid), - ), - encode_octet_string(encode_octet_string(b"A" * 32) + b"B"), - ) - - 
with self.assertRaises(UnexpectedDER) as e: - SigningKey.from_der(der_str) - - self.assertIn( - "trailing junk after the encoded private key", str(e.exception) - ) - - def test_ed25519_sign(self): - sk_str = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - - msg = b"message" - - sig = sk_str.sign(msg, sigencode=sigencode_der) - - self.assertEqual( - sig, - b"\xe1,v\xc9>%\xda\xd2~>\xc3&\na\xf4@|\x9e`X\x11\x13@<\x987\xd4" - b"\r\xb1\xf5\xb3\x15\x7f%i{\xdf}\xdd\xb1\xf3\x02\x7f\x80\x02\xc2" - b'|\xe5\xd6\x06\xc4\n\xa3\xb0\xf6}\xc0\xed)"+E\xaf\x00', - ) - - def test_ed25519_sign_digest_deterministic(self): - sk_str = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - with self.assertRaises(ValueError) as e: - sk_str.sign_digest_deterministic(b"a" * 20) - - self.assertIn("Method unsupported for Edwards", str(e.exception)) - - def test_ed25519_sign_digest(self): - sk_str = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - with self.assertRaises(ValueError) as e: - sk_str.sign_digest(b"a" * 20) - - self.assertIn("Method unsupported for Edwards", str(e.exception)) - - def test_ed25519_sign_number(self): - sk_str = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - with self.assertRaises(ValueError) as e: - sk_str.sign_number(20) - - self.assertIn("Method unsupported for Edwards", str(e.exception)) - - def test_ed25519_to_der_ssleay(self): - pem_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" - "-----END PRIVATE KEY-----\n" - ) - - sk = SigningKey.from_pem(pem_str) - - with self.assertRaises(ValueError) as e: - sk.to_der(format="ssleay") - - self.assertIn("Only PKCS#8 format", str(e.exception)) - - def test_ed25519_to_pem(self): - sk = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - - pem_str = ( - b"-----BEGIN PRIVATE KEY-----\n" - b"MC4CAQAwBQYDK2VwBCIEIDS6x9FO1PG8T4xIPg8Zd0z8uL6sVGZFEZrX17gHC/XU\n" - b"-----END PRIVATE KEY-----\n" - ) - - self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) - - def test_ed25519_to_ssh(self): - sk = SigningKey.from_string( - b"\x34\xBA\xC7\xD1\x4E\xD4\xF1\xBC\x4F\x8C\x48\x3E\x0F\x19\x77\x4C" - b"\xFC\xB8\xBE\xAC\x54\x66\x45\x11\x9A\xD7\xD7\xB8\x07\x0B\xF5\xD4", - Ed25519, - ) - - ssh_str = ( - b"-----BEGIN OPENSSH PRIVATE KEY-----\n" - b"b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUx\n" - b"OQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAIgAAAAAAAAAAAAAAAtzc2gt\n" - b"ZWQyNTUxOQAAACAjAFDQ1mQiKI7jVYl+bkFXja7eRCbuVie8heYLLyrLZQAAAEA0usfRTtTxvE+M\n" - b"SD4PGXdM/Li+rFRmRRGa19e4Bwv11CMAUNDWZCIojuNViX5uQVeNrt5EJu5WJ7yF5gsvKstlAAAA\n" - b"AAECAwQF\n" - b"-----END OPENSSH PRIVATE KEY-----\n" - ) - - self.assertEqual(sk.to_ssh(), ssh_str) - - def test_ed25519_to_and_from_pem(self): - sk = SigningKey.generate(Ed25519) - - decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) - - self.assertEqual(sk, decoded) - - def 
test_ed25519_custom_entropy(self): - sk = SigningKey.generate(Ed25519, entropy=os.urandom) - - self.assertIsNotNone(sk) - - def test_ed25519_from_secret_exponent(self): - with self.assertRaises(ValueError) as e: - SigningKey.from_secret_exponent(1234567890, curve=Ed25519) - - self.assertIn("don't support setting the secret", str(e.exception)) - - def test_ed448_from_pem(self): - pem_str = ( - "-----BEGIN PRIVATE KEY-----\n" - "MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmP\n" - "OP0JMYaLGlTzwovmvCDJ2zLaezu9NLz9aQ==\n" - "-----END PRIVATE KEY-----\n" - ) - sk = SigningKey.from_pem(pem_str) - - sk_str = SigningKey.from_string( - b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" - b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" - b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" - b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", - Ed448, - ) - - self.assertEqual(sk, sk_str) - - def test_ed448_to_pem(self): - sk = SigningKey.from_string( - b"\x3C\x85\xB9\x7A\x85\x2D\x78\x09\x95\x5F\x2E\x0E\xA7\x0E\xC3\xD9" - b"\xC6\xE0\x8A\xB3\x2E\x26\x7F\x8B\x93\x5F\x04\x3A\x07\x3F\x39\x8F" - b"\x38\xFD\x09\x31\x86\x8B\x1A\x54\xF3\xC2\x8B\xE6\xBC\x20\xC9\xDB" - b"\x32\xDA\x7B\x3B\xBD\x34\xBC\xFD\x69", - Ed448, - ) - pem_str = ( - b"-----BEGIN PRIVATE KEY-----\n" - b"MEcCAQAwBQYDK2VxBDsEOTyFuXqFLXgJlV8uDqcOw9nG4IqzLiZ/i5NfBDoHPzmPOP0JMYaLGlTz\n" - b"wovmvCDJ2zLaezu9NLz9aQ==\n" - b"-----END PRIVATE KEY-----\n" - ) - - self.assertEqual(sk.to_pem(format="pkcs8"), pem_str) - - def test_ed448_encode_decode(self): - sk = SigningKey.generate(Ed448) - - decoded = SigningKey.from_pem(sk.to_pem(format="pkcs8")) - - self.assertEqual(decoded, sk) - - -class TestTrivialCurve(unittest.TestCase): - @classmethod - def setUpClass(cls): - # To test what happens with r or s in signing happens to be zero we - # need to find a scalar that creates one of the points on a curve that - # has x coordinate equal to zero. 
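# (Aside, not part of the original comment: a direct way to run that search
# is to build CurveFp(163, 84, 58) with generator (2, 87) of order 167, as
# done just below, and loop `for k in range(1, 167)` checking whether
# `(gen * k).x() == 0`; with only 167 multiples the scan is instantaneous.)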
- # Even for secp112r2 curve that's non trivial so use this toy - # curve, for which we can iterate over all points quickly - curve = CurveFp(163, 84, 58) - gen = PointJacobi(curve, 2, 87, 1, 167, generator=True) - - cls.toy_curve = Curve("toy_p8", curve, gen, (1, 2, 0)) - - cls.sk = SigningKey.from_secret_exponent( - 140, - cls.toy_curve, - hashfunc=hashlib.sha1, - ) - - def test_generator_sanity(self): - gen = self.toy_curve.generator - - self.assertEqual(gen * gen.order(), INFINITY) - - def test_public_key_sanity(self): - self.assertEqual(self.sk.verifying_key.to_string(), b"\x98\x1e") - - def test_deterministic_sign(self): - sig = self.sk.sign_deterministic(b"message") - - self.assertEqual(sig, b"-.") - - self.assertTrue(self.sk.verifying_key.verify(sig, b"message")) - - def test_deterministic_sign_random_message(self): - msg = os.urandom(32) - sig = self.sk.sign_deterministic(msg) - self.assertEqual(len(sig), 2) - self.assertTrue(self.sk.verifying_key.verify(sig, msg)) - - def test_deterministic_sign_that_rises_R_zero_error(self): - # the raised RSZeroError is caught and handled internally by - # sign_deterministic methods - msg = b"\x00\x4f" - sig = self.sk.sign_deterministic(msg) - self.assertEqual(sig, b"\x36\x9e") - self.assertTrue(self.sk.verifying_key.verify(sig, msg)) - - def test_deterministic_sign_that_rises_S_zero_error(self): - msg = b"\x01\x6d" - sig = self.sk.sign_deterministic(msg) - self.assertEqual(sig, b"\x49\x6c") - self.assertTrue(self.sk.verifying_key.verify(sig, msg)) - - -# test VerifyingKey.verify() -prv_key_str = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" - "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" - "bA==\n" - "-----END EC PRIVATE KEY-----\n" -) -key_bytes = unpem(prv_key_str) -assert isinstance(key_bytes, bytes) -sk = SigningKey.from_der(key_bytes) -vk = sk.verifying_key - -data = ( - b"some string for signing" - b"contents don't really matter" - b"but do include also some crazy values: " - b"\x00\x01\t\r\n\x00\x00\x00\xff\xf0" -) -assert len(data) % 4 == 0 -sha1 = hashlib.sha1() -sha1.update(data) -data_hash = sha1.digest() -assert isinstance(data_hash, bytes) -sig_raw = sk.sign(data, sigencode=sigencode_string) -assert isinstance(sig_raw, bytes) -sig_der = sk.sign(data, sigencode=sigencode_der) -assert isinstance(sig_der, bytes) -sig_strings = sk.sign(data, sigencode=sigencode_strings) -assert isinstance(sig_strings[0], bytes) - -verifiers = [] -for modifier, fun in [ - ("bytes", lambda x: x), - ("bytes memoryview", buffer), - ("bytearray", bytearray), - ("bytearray memoryview", lambda x: buffer(bytearray(x))), - ("array.array of bytes", lambda x: array.array("B", x)), - ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), - ("array.array of ints", lambda x: array.array("I", x)), - ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), -]: - if "ints" in modifier: - conv = lambda x: x - else: - conv = fun - for sig_format, signature, decoder, mod_apply in [ - ("raw", sig_raw, sigdecode_string, lambda x: conv(x)), - ("der", sig_der, sigdecode_der, lambda x: conv(x)), - ( - "strings", - sig_strings, - sigdecode_strings, - lambda x: tuple(conv(i) for i in x), - ), - ]: - for method_name, vrf_mthd, vrf_data in [ - ("verify", vk.verify, data), - ("verify_digest", vk.verify_digest, data_hash), - ]: - verifiers.append( - pytest.param( - signature, - decoder, - mod_apply, - fun, - vrf_mthd, - vrf_data, - 
id="{2}-{0}-{1}".format(modifier, sig_format, method_name), - ) - ) - - -@pytest.mark.parametrize( - "signature,decoder,mod_apply,fun,vrf_mthd,vrf_data", verifiers -) -def test_VerifyingKey_verify( - signature, decoder, mod_apply, fun, vrf_mthd, vrf_data -): - sig = mod_apply(signature) - - assert vrf_mthd(sig, fun(vrf_data), sigdecode=decoder) - - -# test SigningKey.from_string() -prv_key_bytes = ( - b"^\xc8B\x0b\xd6\xef\x92R\xa9B\xe9\x89\x04<\xa2" - b"\x9fV\x1f\xa5%w\x0e\xb1\xc5" -) -assert len(prv_key_bytes) == 24 -converters = [] -for modifier, convert in [ - ("bytes", lambda x: x), - ("bytes memoryview", buffer), - ("bytearray", bytearray), - ("bytearray memoryview", lambda x: buffer(bytearray(x))), - ("array.array of bytes", lambda x: array.array("B", x)), - ("array.array of bytes memoryview", lambda x: buffer(array.array("B", x))), - ("array.array of ints", lambda x: array.array("I", x)), - ("array.array of ints memoryview", lambda x: buffer(array.array("I", x))), -]: - converters.append(pytest.param(convert, id=modifier)) - - -@pytest.mark.parametrize("convert", converters) -def test_SigningKey_from_string(convert): - key = convert(prv_key_bytes) - sk = SigningKey.from_string(key) - - assert sk.to_string() == prv_key_bytes - - -# test SigningKey.from_der() -prv_key_str = ( - "-----BEGIN EC PRIVATE KEY-----\n" - "MF8CAQEEGF7IQgvW75JSqULpiQQ8op9WH6Uldw6xxaAKBggqhkjOPQMBAaE0AzIA\n" - "BLiBd9CE7xf15FY5QIAoNg+fWbSk1yZOYtoGUdzkejWkxbRc9RWTQjqLVXucIJnz\n" - "bA==\n" - "-----END EC PRIVATE KEY-----\n" -) -key_bytes = unpem(prv_key_str) -assert isinstance(key_bytes, bytes) - -# last two converters are for array.array of ints, those require input -# that's multiple of 4, which no curve we support produces -@pytest.mark.parametrize("convert", converters[:-2]) -def test_SigningKey_from_der(convert): - key = convert(key_bytes) - sk = SigningKey.from_der(key) - - assert sk.to_string() == prv_key_bytes - - -# test SigningKey.sign_deterministic() -extra_entropy = b"\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11" - - -@pytest.mark.parametrize("convert", converters) -def test_SigningKey_sign_deterministic(convert): - sig = sk.sign_deterministic( - convert(data), extra_entropy=convert(extra_entropy) - ) - - vk.verify(sig, data) - - -# test SigningKey.sign_digest_deterministic() -@pytest.mark.parametrize("convert", converters) -def test_SigningKey_sign_digest_deterministic(convert): - sig = sk.sign_digest_deterministic( - convert(data_hash), extra_entropy=convert(extra_entropy) - ) - - vk.verify(sig, data) - - -@pytest.mark.parametrize("convert", converters) -def test_SigningKey_sign(convert): - sig = sk.sign(convert(data)) - - vk.verify(sig, data) - - -@pytest.mark.parametrize("convert", converters) -def test_SigningKey_sign_digest(convert): - sig = sk.sign_digest(convert(data_hash)) - - vk.verify(sig, data) - - -def test_SigningKey_with_unlikely_value(): - sk = SigningKey.from_secret_exponent(NIST256p.order - 1, curve=NIST256p) - vk = sk.verifying_key - sig = sk.sign(b"hello") - assert vk.verify(sig, b"hello") - - -def test_SigningKey_with_custom_curve_old_point(): - generator = generator_brainpoolp160r1 - generator = Point( - generator.curve(), - generator.x(), - generator.y(), - generator.order(), - ) - - curve = Curve( - "BRAINPOOLP160r1", - generator.curve(), - generator, - (1, 3, 36, 3, 3, 2, 8, 1, 1, 1), - ) - - sk = SigningKey.from_secret_exponent(12, curve) - - sk2 = SigningKey.from_secret_exponent(12, BRAINPOOLP160r1) - - assert sk.privkey == sk2.privkey - - -def 
test_VerifyingKey_inequality_with_different_curves(): - sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) - sk2 = SigningKey.from_secret_exponent(2, NIST256p) - - assert not (sk1.verifying_key == sk2.verifying_key) - - -def test_VerifyingKey_inequality_with_different_secret_points(): - sk1 = SigningKey.from_secret_exponent(2, BRAINPOOLP160r1) - sk2 = SigningKey.from_secret_exponent(3, BRAINPOOLP160r1) - - assert not (sk1.verifying_key == sk2.verifying_key) - - -def test_SigningKey_from_pem_pkcs8v2_EdDSA(): - pem = """-----BEGIN PRIVATE KEY----- - MFMCAQEwBQYDK2VwBCIEICc2F2ag1n1QP0jY+g9qWx5sDkx0s/HdNi3cSRHw+zsI - oSMDIQA+HQ2xCif8a/LMWR2m5HaCm5I2pKe/cc8OiRANMHxjKQ== - -----END PRIVATE KEY-----""" - - sk = SigningKey.from_pem(pem) - assert sk.curve == Ed25519 diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py b/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py deleted file mode 100644 index e5a87c28..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_malformed_sigs.py +++ /dev/null @@ -1,378 +0,0 @@ -from __future__ import with_statement, division - -import hashlib - -try: - from hashlib import algorithms_available -except ImportError: # pragma: no cover - algorithms_available = [ - "md5", - "sha1", - "sha224", - "sha256", - "sha384", - "sha512", - ] -# skip algorithms broken by change to OpenSSL 3.0 and early versions -# of hashlib that list algorithms that require the legacy provider to work -# https://bugs.python.org/issue38820 -algorithms_available = [ - i - for i in algorithms_available - if i not in ("mdc2", "md2", "md4", "whirlpool", "ripemd160") -] -from functools import partial -import pytest -import sys -import hypothesis.strategies as st -from hypothesis import note, assume, given, settings, example - -from .keys import SigningKey -from .keys import BadSignatureError -from .util import sigencode_der, sigencode_string -from .util import sigdecode_der, sigdecode_string -from .curves import curves, SECP112r2, SECP128r1 -from .der import ( - encode_integer, - encode_bitstring, - encode_octet_string, - encode_oid, - encode_sequence, - encode_constructed, -) -from .ellipticcurve import CurveEdTw - - -example_data = b"some data to sign" -"""Since the data is hashed for processing, really any string will do.""" - - -hash_and_size = [ - (name, hashlib.new(name).digest_size) for name in algorithms_available -] -"""Pairs of hash names and their output sizes. 
-Needed for pairing with curves as we don't support hashes -bigger than order sizes of curves.""" - - -if "--fast" in sys.argv: # pragma: no cover - curves = [SECP112r2, SECP128r1] - - -keys_and_sigs = [] -"""Name of the curve+hash combination, VerifyingKey and DER signature.""" - - -# for hypothesis strategy shrinking we want smallest curves and hashes first -for curve in sorted(curves, key=lambda x: x.baselen): - for hash_alg in [ - name - for name, size in sorted(hash_and_size, key=lambda x: x[1]) - if 0 < size <= curve.baselen - ]: - sk = SigningKey.generate( - curve, hashfunc=partial(hashlib.new, hash_alg) - ) - - keys_and_sigs.append( - ( - "{0} {1}".format(curve, hash_alg), - sk.verifying_key, - sk.sign(example_data, sigencode=sigencode_der), - ) - ) - - -# first make sure that the signatures can be verified -@pytest.mark.parametrize( - "verifying_key,signature", - [pytest.param(vk, sig, id=name) for name, vk, sig in keys_and_sigs], -) -def test_signatures(verifying_key, signature): - assert verifying_key.verify( - signature, example_data, sigdecode=sigdecode_der - ) - - -@st.composite -def st_fuzzed_sig(draw, keys_and_sigs): # pragma: no cover - """ - Hypothesis strategy that generates pairs of VerifyingKey and malformed - signatures created by fuzzing of a valid signature. - """ - name, verifying_key, old_sig = draw(st.sampled_from(keys_and_sigs)) - note("Configuration: {0}".format(name)) - - sig = bytearray(old_sig) - - # decide which bytes should be removed - to_remove = draw( - st.lists(st.integers(min_value=0, max_value=len(sig) - 1), unique=True) - ) - to_remove.sort() - for i in reversed(to_remove): - del sig[i] - note("Remove bytes: {0}".format(to_remove)) - - # decide which bytes of the original signature should be changed - xors = None - if sig: # pragma: no branch - xors = draw( - st.dictionaries( - st.integers(min_value=0, max_value=len(sig) - 1), - st.integers(min_value=1, max_value=255), - ) - ) - for i, val in xors.items(): - sig[i] ^= val - note("xors: {0}".format(xors)) - - # decide where new data should be inserted - insert_pos = draw(st.integers(min_value=0, max_value=len(sig))) - # NIST521p signature is about 140 bytes long, test slightly longer - insert_data = draw(st.binary(max_size=256)) - - sig = sig[:insert_pos] + insert_data + sig[insert_pos:] - note( - "Inserted at position {0} bytes: {1!r}".format(insert_pos, insert_data) - ) - - sig = bytes(sig) - # make sure that there was performed at least one mutation on the data - assume(to_remove or xors or insert_data) - # and that the mutations didn't cancel each-other out - assume(sig != old_sig) - - return verifying_key, sig - - -params = {} -# not supported in hypothesis 2.0.0 -if sys.version_info >= (2, 7): # pragma: no branch - from hypothesis import HealthCheck - - # deadline=5s because NIST521p are slow to verify - params["deadline"] = 5000 - params["suppress_health_check"] = [ - HealthCheck.data_too_large, - HealthCheck.filter_too_much, - HealthCheck.too_slow, - ] -if "--fast" in sys.argv: # pragma: no cover - params["max_examples"] = 20 - -slow_params = dict(params) -if "--fast" in sys.argv: # pragma: no cover - slow_params["max_examples"] = 1 -else: - slow_params["max_examples"] = 10 - - -@settings(**slow_params) -@given(st_fuzzed_sig(keys_and_sigs)) -def test_fuzzed_der_signatures(args): - verifying_key, sig = args - - with pytest.raises(BadSignatureError): - verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) - - -@st.composite -def st_random_der_ecdsa_sig_value(draw): # pragma: no cover 
- """ - Hypothesis strategy for selecting random values and encoding them - to ECDSA-Sig-Value object:: - - ECDSA-Sig-Value ::= SEQUENCE { - r INTEGER, - s INTEGER - } - """ - name, verifying_key, _ = draw(st.sampled_from(keys_and_sigs)) - note("Configuration: {0}".format(name)) - order = int(verifying_key.curve.order) - - # the encode_integer doesn't support negative numbers, would be nice - # to generate them too, but we have coverage for remove_integer() - # verifying that it doesn't accept them, so meh. - # Test all numbers around the ones that can show up (around order) - # way smaller and slightly bigger - r = draw( - st.integers(min_value=0, max_value=order << 4) - | st.integers(min_value=order >> 2, max_value=order + 1) - ) - s = draw( - st.integers(min_value=0, max_value=order << 4) - | st.integers(min_value=order >> 2, max_value=order + 1) - ) - - sig = encode_sequence(encode_integer(r), encode_integer(s)) - - return verifying_key, sig - - -@settings(**slow_params) -@given(st_random_der_ecdsa_sig_value()) -def test_random_der_ecdsa_sig_value(params): - """ - Check if random values encoded in ECDSA-Sig-Value structure are rejected - as signature. - """ - verifying_key, sig = params - - with pytest.raises(BadSignatureError): - verifying_key.verify(sig, example_data, sigdecode=sigdecode_der) - - -def st_der_integer(*args, **kwargs): # pragma: no cover - """ - Hypothesis strategy that returns a random positive integer as DER - INTEGER. - Parameters are passed to hypothesis.strategy.integer. - """ - if "min_value" not in kwargs: # pragma: no branch - kwargs["min_value"] = 0 - return st.builds(encode_integer, st.integers(*args, **kwargs)) - - -@st.composite -def st_der_bit_string(draw, *args, **kwargs): # pragma: no cover - """ - Hypothesis strategy that returns a random DER BIT STRING. - Parameters are passed to hypothesis.strategy.binary. - """ - data = draw(st.binary(*args, **kwargs)) - if data: - unused = draw(st.integers(min_value=0, max_value=7)) - data = bytearray(data) - data[-1] &= -(2**unused) - data = bytes(data) - else: - unused = 0 - return encode_bitstring(data, unused) - - -def st_der_octet_string(*args, **kwargs): # pragma: no cover - """ - Hypothesis strategy that returns a random DER OCTET STRING object. - Parameters are passed to hypothesis.strategy.binary - """ - return st.builds(encode_octet_string, st.binary(*args, **kwargs)) - - -def st_der_null(): # pragma: no cover - """ - Hypothesis strategy that returns DER NULL object. - """ - return st.just(b"\x05\x00") - - -@st.composite -def st_der_oid(draw): # pragma: no cover - """ - Hypothesis strategy that returns DER OBJECT IDENTIFIER objects. - """ - first = draw(st.integers(min_value=0, max_value=2)) - if first < 2: - second = draw(st.integers(min_value=0, max_value=39)) - else: - second = draw(st.integers(min_value=0, max_value=2**512)) - rest = draw( - st.lists(st.integers(min_value=0, max_value=2**512), max_size=50) - ) - return encode_oid(first, second, *rest) - - -def st_der(): # pragma: no cover - """ - Hypothesis strategy that returns random DER structures. - - A valid DER structure is any primitive object, an octet encoding - of a valid DER structure, sequence of valid DER objects or a constructed - encoding of any of the above. 
- """ - return st.recursive( # pragma: no branch - st.just(b"") - | st_der_integer(max_value=2**4096) - | st_der_bit_string(max_size=1024**2) - | st_der_octet_string(max_size=1024**2) - | st_der_null() - | st_der_oid(), - lambda children: st.builds(encode_octet_string, st.one_of(children)) - | st.builds(lambda x: encode_bitstring(x, 0), st.one_of(children)) - | st.builds( - lambda x: encode_sequence(*x), st.lists(children, max_size=200) - ) - | st.builds( - encode_constructed, - st.integers(min_value=0, max_value=0x3F), - st.one_of(children), - ), - max_leaves=40, - ) - - -@settings(**slow_params) -@given(st.sampled_from(keys_and_sigs), st_der()) -def test_random_der_as_signature(params, der): - """Check if random DER structures are rejected as signature""" - name, verifying_key, _ = params - - with pytest.raises(BadSignatureError): - verifying_key.verify(der, example_data, sigdecode=sigdecode_der) - - -@settings(**slow_params) -@given(st.sampled_from(keys_and_sigs), st.binary(max_size=1024**2)) -@example( - keys_and_sigs[0], encode_sequence(encode_integer(0), encode_integer(0)) -) -@example( - keys_and_sigs[0], - encode_sequence(encode_integer(1), encode_integer(1)) + b"\x00", -) -@example(keys_and_sigs[0], encode_sequence(*[encode_integer(1)] * 3)) -def test_random_bytes_as_signature(params, der): - """Check if random bytes are rejected as signature""" - name, verifying_key, _ = params - - with pytest.raises(BadSignatureError): - verifying_key.verify(der, example_data, sigdecode=sigdecode_der) - - -keys_and_string_sigs = [ - ( - name, - verifying_key, - sigencode_string( - *sigdecode_der(sig, verifying_key.curve.order), - order=verifying_key.curve.order - ), - ) - for name, verifying_key, sig in keys_and_sigs - if not isinstance(verifying_key.curve.curve, CurveEdTw) -] -""" -Name of the curve+hash combination, VerifyingKey and signature as a -byte string. 
-""" - - -keys_and_string_sigs += [ - ( - name, - verifying_key, - sig, - ) - for name, verifying_key, sig in keys_and_sigs - if isinstance(verifying_key.curve.curve, CurveEdTw) -] - - -@settings(**slow_params) -@given(st_fuzzed_sig(keys_and_string_sigs)) -def test_fuzzed_string_signatures(params): - verifying_key, sig = params - - with pytest.raises(BadSignatureError): - verifying_key.verify(sig, example_data, sigdecode=sigdecode_string) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py b/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py deleted file mode 100644 index 966eca29..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_numbertheory.py +++ /dev/null @@ -1,483 +0,0 @@ -import operator -from functools import reduce -import sys - -try: - import unittest2 as unittest -except ImportError: - import unittest -import hypothesis.strategies as st -import pytest -from hypothesis import given, settings, example - -try: - from hypothesis import HealthCheck - - HC_PRESENT = True -except ImportError: # pragma: no cover - HC_PRESENT = False -from .numbertheory import ( - SquareRootError, - JacobiError, - factorization, - gcd, - lcm, - jacobi, - inverse_mod, - is_prime, - next_prime, - smallprimes, - square_root_mod_prime, -) - -try: - from gmpy2 import mpz -except ImportError: - try: - from gmpy import mpz - except ImportError: - - def mpz(x): - return x - - -BIGPRIMES = ( - 999671, - 999683, - 999721, - 999727, - 999749, - 999763, - 999769, - 999773, - 999809, - 999853, - 999863, - 999883, - 999907, - 999917, - 999931, - 999953, - 999959, - 999961, - 999979, - 999983, -) - - -@pytest.mark.parametrize( - "prime, next_p", [(p, q) for p, q in zip(BIGPRIMES[:-1], BIGPRIMES[1:])] -) -def test_next_prime(prime, next_p): - assert next_prime(prime) == next_p - - -@pytest.mark.parametrize("val", [-1, 0, 1]) -def test_next_prime_with_nums_less_2(val): - assert next_prime(val) == 2 - - -@pytest.mark.slow -@pytest.mark.parametrize("prime", smallprimes) -def test_square_root_mod_prime_for_small_primes(prime): - squares = set() - for num in range(0, 1 + prime // 2): - sq = num * num % prime - squares.add(sq) - root = square_root_mod_prime(sq, prime) - # tested for real with TestNumbertheory.test_square_root_mod_prime - assert root * root % prime == sq - - for nonsquare in range(0, prime): - if nonsquare in squares: - continue - with pytest.raises(SquareRootError): - square_root_mod_prime(nonsquare, prime) - - -def test_square_root_mod_prime_for_2(): - a = square_root_mod_prime(1, 2) - assert a == 1 - - -def test_square_root_mod_prime_for_small_prime(): - root = square_root_mod_prime(98**2 % 101, 101) - assert root * root % 101 == 9 - - -def test_square_root_mod_prime_for_p_congruent_5(): - p = 13 - assert p % 8 == 5 - - root = square_root_mod_prime(3, p) - assert root * root % p == 3 - - -def test_square_root_mod_prime_for_p_congruent_5_large_d(): - p = 29 - assert p % 8 == 5 - - root = square_root_mod_prime(4, p) - assert root * root % p == 4 - - -class TestSquareRootModPrime(unittest.TestCase): - def test_power_of_2_p(self): - with self.assertRaises(JacobiError): - square_root_mod_prime(12, 32) - - def test_no_square(self): - with self.assertRaises(SquareRootError) as e: - square_root_mod_prime(12, 31) - - self.assertIn("no square root", str(e.exception)) - - def test_non_prime(self): - with self.assertRaises(SquareRootError) as e: - square_root_mod_prime(12, 33) - - self.assertIn("p is not prime", str(e.exception)) - - def test_non_prime_with_negative(self): - 
with self.assertRaises(SquareRootError) as e: - square_root_mod_prime(697 - 1, 697) - - self.assertIn("p is not prime", str(e.exception)) - - -@st.composite -def st_two_nums_rel_prime(draw): - # 521-bit is the biggest curve we operate on, use 1024 for a bit - # of breathing space - mod = draw(st.integers(min_value=2, max_value=2**1024)) - num = draw( - st.integers(min_value=1, max_value=mod - 1).filter( - lambda x: gcd(x, mod) == 1 - ) - ) - return num, mod - - -@st.composite -def st_primes(draw, *args, **kwargs): - if "min_value" not in kwargs: # pragma: no branch - kwargs["min_value"] = 1 - prime = draw( - st.sampled_from(smallprimes) - | st.integers(*args, **kwargs).filter(is_prime) - ) - return prime - - -@st.composite -def st_num_square_prime(draw): - prime = draw(st_primes(max_value=2**1024)) - num = draw(st.integers(min_value=0, max_value=1 + prime // 2)) - sq = num * num % prime - return sq, prime - - -@st.composite -def st_comp_with_com_fac(draw): - """ - Strategy that returns lists of numbers, all having a common factor. - """ - primes = draw( - st.lists(st_primes(max_value=2**512), min_size=1, max_size=10) - ) - # select random prime(s) that will make the common factor of composites - com_fac_primes = draw( - st.lists(st.sampled_from(primes), min_size=1, max_size=20) - ) - com_fac = reduce(operator.mul, com_fac_primes, 1) - - # select at most 20 lists (returned numbers), - # each having at most 30 primes (factors) including none (then the number - # will be 1) - comp_primes = draw( # pragma: no branch - st.integers(min_value=1, max_value=20).flatmap( - lambda n: st.lists( - st.lists(st.sampled_from(primes), max_size=30), - min_size=1, - max_size=n, - ) - ) - ) - - return [reduce(operator.mul, nums, 1) * com_fac for nums in comp_primes] - - -@st.composite -def st_comp_no_com_fac(draw): - """ - Strategy that returns lists of numbers that don't have a common factor. 
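What the gcd(x, mod) == 1 filter in st_two_nums_rel_prime guarantees, in miniature: relatively prime numbers are exactly the invertible ones (a sketch assuming ecdsa.numbertheory; the values are arbitrary examples).

from ecdsa.numbertheory import gcd, inverse_mod

num, mod = 17, 3120
assert gcd(num, mod) == 1            # same filter as the strategy above
inv = inverse_mod(num, mod)          # 2753 for this pair
assert 0 < inv < mod
assert num * inv % mod == 1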
- """ - primes = draw( - st.lists( - st_primes(max_value=2**512), min_size=2, max_size=10, unique=True - ) - ) - # first select the primes that will create the uncommon factor - # between returned numbers - uncom_fac_primes = draw( - st.lists( - st.sampled_from(primes), - min_size=1, - max_size=len(primes) - 1, - unique=True, - ) - ) - uncom_fac = reduce(operator.mul, uncom_fac_primes, 1) - - # then build composites from leftover primes - leftover_primes = [i for i in primes if i not in uncom_fac_primes] - - assert leftover_primes - assert uncom_fac_primes - - # select at most 20 lists, each having at most 30 primes - # selected from the leftover_primes list - number_primes = draw( # pragma: no branch - st.integers(min_value=1, max_value=20).flatmap( - lambda n: st.lists( - st.lists(st.sampled_from(leftover_primes), max_size=30), - min_size=1, - max_size=n, - ) - ) - ) - - numbers = [reduce(operator.mul, nums, 1) for nums in number_primes] - - insert_at = draw(st.integers(min_value=0, max_value=len(numbers))) - numbers.insert(insert_at, uncom_fac) - return numbers - - -HYP_SETTINGS = {} -if HC_PRESENT: # pragma: no branch - HYP_SETTINGS["suppress_health_check"] = [ - HealthCheck.filter_too_much, - HealthCheck.too_slow, - ] - # the factorization() sometimes takes a long time to finish - HYP_SETTINGS["deadline"] = 5000 - -if "--fast" in sys.argv: # pragma: no cover - HYP_SETTINGS["max_examples"] = 20 - - -HYP_SLOW_SETTINGS = dict(HYP_SETTINGS) -if "--fast" in sys.argv: # pragma: no cover - HYP_SLOW_SETTINGS["max_examples"] = 1 -else: - HYP_SLOW_SETTINGS["max_examples"] = 20 - - -class TestIsPrime(unittest.TestCase): - def test_very_small_prime(self): - assert is_prime(23) - - def test_very_small_composite(self): - assert not is_prime(22) - - def test_small_prime(self): - assert is_prime(123456791) - - def test_special_composite(self): - assert not is_prime(10261) - - def test_medium_prime_1(self): - # nextPrime[2^256] - assert is_prime(2**256 + 0x129) - - def test_medium_prime_2(self): - # nextPrime(2^256+0x129) - assert is_prime(2**256 + 0x12D) - - def test_medium_trivial_composite(self): - assert not is_prime(2**256 + 0x130) - - def test_medium_non_trivial_composite(self): - assert not is_prime(2**256 + 0x12F) - - def test_large_prime(self): - # nextPrime[2^2048] - assert is_prime(mpz(2) ** 2048 + 0x3D5) - - def test_pseudoprime_base_19(self): - assert not is_prime(1543267864443420616877677640751301) - - def test_pseudoprime_base_300(self): - # F. Arnault "Constructing Carmichael Numbers Which Are Strong - # Pseudoprimes to Several Bases". Journal of Symbolic - # Computation. 20 (2): 151-161. doi:10.1006/jsco.1995.1042. 
- # Section 4.4 Large Example (a pseudoprime to all bases up to - # 300) - p = int( - "29 674 495 668 685 510 550 154 174 642 905 332 730 " - "771 991 799 853 043 350 995 075 531 276 838 753 171 " - "770 199 594 238 596 428 121 188 033 664 754 218 345 " - "562 493 168 782 883".replace(" ", "") - ) - - assert is_prime(p) - for _ in range(10): - if not is_prime(p * (313 * (p - 1) + 1) * (353 * (p - 1) + 1)): - break - else: - assert False, "composite not detected" - - -class TestNumbertheory(unittest.TestCase): - def test_gcd(self): - assert gcd(3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13) == 3 * 5 - assert gcd([3 * 5 * 7, 3 * 5 * 11, 3 * 5 * 13]) == 3 * 5 - assert gcd(3) == 3 - - @unittest.skipUnless( - HC_PRESENT, - "Hypothesis 2.0.0 can't be made tolerant of hard to " - "meet requirements (like `is_prime()`), the test " - "case times-out on it", - ) - @settings(**HYP_SLOW_SETTINGS) - @example([877 * 1151, 877 * 1009]) - @given(st_comp_with_com_fac()) - def test_gcd_with_com_factor(self, numbers): - n = gcd(numbers) - assert 1 in numbers or n != 1 - for i in numbers: - assert i % n == 0 - - @unittest.skipUnless( - HC_PRESENT, - "Hypothesis 2.0.0 can't be made tolerant of hard to " - "meet requirements (like `is_prime()`), the test " - "case times-out on it", - ) - @settings(**HYP_SLOW_SETTINGS) - @example([1151, 1069, 1009]) - @given(st_comp_no_com_fac()) - def test_gcd_with_uncom_factor(self, numbers): - n = gcd(numbers) - assert n == 1 - - @settings(**HYP_SLOW_SETTINGS) - @given( - st.lists( - st.integers(min_value=1, max_value=2**8192), - min_size=1, - max_size=20, - ) - ) - def test_gcd_with_random_numbers(self, numbers): - n = gcd(numbers) - for i in numbers: - # check that at least it's a divider - assert i % n == 0 - - def test_lcm(self): - assert lcm(3, 5 * 3, 7 * 3) == 3 * 5 * 7 - assert lcm([3, 5 * 3, 7 * 3]) == 3 * 5 * 7 - assert lcm(3) == 3 - - @settings(**HYP_SLOW_SETTINGS) - @given( - st.lists( - st.integers(min_value=1, max_value=2**8192), - min_size=1, - max_size=20, - ) - ) - def test_lcm_with_random_numbers(self, numbers): - n = lcm(numbers) - for i in numbers: - assert n % i == 0 - - @unittest.skipUnless( - HC_PRESENT, - "Hypothesis 2.0.0 can't be made tolerant of hard to " - "meet requirements (like `is_prime()`), the test " - "case times-out on it", - ) - @settings(**HYP_SLOW_SETTINGS) - @given(st_num_square_prime()) - def test_square_root_mod_prime(self, vals): - square, prime = vals - - calc = square_root_mod_prime(square, prime) - assert calc * calc % prime == square - - @pytest.mark.slow - @settings(**HYP_SLOW_SETTINGS) - @given(st.integers(min_value=1, max_value=10**12)) - @example(265399 * 1526929) - @example(373297**2 * 553991) - def test_factorization(self, num): - factors = factorization(num) - mult = 1 - for i in factors: - mult *= i[0] ** i[1] - assert mult == num - - def test_factorisation_smallprimes(self): - exp = 101 * 103 - assert 101 in smallprimes - assert 103 in smallprimes - factors = factorization(exp) - mult = 1 - for i in factors: - mult *= i[0] ** i[1] - assert mult == exp - - def test_factorisation_not_smallprimes(self): - exp = 1231 * 1237 - assert 1231 not in smallprimes - assert 1237 not in smallprimes - factors = factorization(exp) - mult = 1 - for i in factors: - mult *= i[0] ** i[1] - assert mult == exp - - def test_jacobi_with_zero(self): - assert jacobi(0, 3) == 0 - - def test_jacobi_with_one(self): - assert jacobi(1, 3) == 1 - - @settings(**HYP_SLOW_SETTINGS) - @given(st.integers(min_value=3, max_value=1000).filter(lambda x: x % 2)) - def 
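The reassembly check test_factorization performs above, isolated: factorization() returns (prime, exponent) pairs whose product reconstructs the input (a sketch assuming ecdsa.numbertheory).

from ecdsa.numbertheory import factorization, is_prime

n = 2**5 * 3**2 * 101
mult = 1
for prime, exp in factorization(n):   # e.g. [(2, 5), (3, 2), (101, 1)]
    assert is_prime(prime)
    mult *= prime**exp
assert mult == n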
test_jacobi(self, mod): - mod = mpz(mod) - if is_prime(mod): - squares = set() - for root in range(1, mod): - root = mpz(root) - assert jacobi(root * root, mod) == 1 - squares.add(root * root % mod) - for i in range(1, mod): - if i not in squares: - i = mpz(i) - assert jacobi(i, mod) == -1 - else: - factors = factorization(mod) - for a in range(1, mod): - c = 1 - for i in factors: - c *= jacobi(a, i[0]) ** i[1] - assert c == jacobi(a, mod) - - @settings(**HYP_SLOW_SETTINGS) - @given(st_two_nums_rel_prime()) - def test_inverse_mod(self, nums): - num, mod = nums - - inv = inverse_mod(num, mod) - - assert 0 < inv < mod - assert num * inv % mod == 1 - - def test_inverse_mod_with_zero(self): - assert 0 == inverse_mod(0, 11) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py b/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py deleted file mode 100644 index 799e9b74..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_pyecdsa.py +++ /dev/null @@ -1,2564 +0,0 @@ -from __future__ import with_statement, division, print_function - -try: - import unittest2 as unittest -except ImportError: - import unittest -import os -import shutil -import subprocess -import pytest -import sys -from binascii import hexlify, unhexlify -import hashlib -from functools import partial - -from hypothesis import given, settings -import hypothesis.strategies as st - -from six import binary_type -from .keys import SigningKey, VerifyingKey -from .keys import BadSignatureError, MalformedPointError, BadDigestError -from . import util -from .util import ( - sigencode_der, - sigencode_strings, - sigencode_strings_canonize, - sigencode_string_canonize, - sigencode_der_canonize, -) -from .util import sigdecode_der, sigdecode_strings, sigdecode_string -from .util import number_to_string, encoded_oid_ecPublicKey, MalformedSignature -from .curves import Curve, UnknownCurveError -from .curves import ( - SECP112r1, - SECP112r2, - SECP128r1, - SECP160r1, - NIST192p, - NIST224p, - NIST256p, - NIST384p, - NIST521p, - SECP256k1, - BRAINPOOLP160r1, - BRAINPOOLP192r1, - BRAINPOOLP224r1, - BRAINPOOLP256r1, - BRAINPOOLP320r1, - BRAINPOOLP384r1, - BRAINPOOLP512r1, - BRAINPOOLP160t1, - BRAINPOOLP192t1, - BRAINPOOLP224t1, - BRAINPOOLP256t1, - BRAINPOOLP320t1, - BRAINPOOLP384t1, - BRAINPOOLP512t1, - Ed25519, - Ed448, - curves, -) -from .ecdsa import ( - curve_brainpoolp224r1, - curve_brainpoolp256r1, - curve_brainpoolp384r1, - curve_brainpoolp512r1, -) -from .ellipticcurve import Point -from . import der -from . import rfc6979 -from . 
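The prime-modulus branch of test_jacobi above, spelled out: for prime p, jacobi(a, p) is 1 exactly on the quadratic residues and -1 elsewhere (a sketch assuming ecdsa.numbertheory).

from ecdsa.numbertheory import jacobi, is_prime

p = 101
assert is_prime(p)
residues = set(x * x % p for x in range(1, p))
for a in range(1, p):
    assert jacobi(a, p) == (1 if a in residues else -1)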
import ecdsa - - -class SubprocessError(Exception): - pass - - -HYP_SETTINGS = {} - - -if "--fast" in sys.argv: # pragma: no cover - HYP_SETTINGS["max_examples"] = 2 - - -def run_openssl(cmd): - OPENSSL = "openssl" - p = subprocess.Popen( - [OPENSSL] + cmd.split(), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - stdout, ignored = p.communicate() - if p.returncode != 0: - raise SubprocessError( - "cmd '%s %s' failed: rc=%s, stdout/err was %s" - % (OPENSSL, cmd, p.returncode, stdout) - ) - return stdout.decode() - - -class ECDSA(unittest.TestCase): - def test_basic(self): - priv = SigningKey.generate() - pub = priv.get_verifying_key() - - data = b"blahblah" - sig = priv.sign(data) - - self.assertTrue(pub.verify(sig, data)) - self.assertRaises(BadSignatureError, pub.verify, sig, data + b"bad") - - pub2 = VerifyingKey.from_string(pub.to_string()) - self.assertTrue(pub2.verify(sig, data)) - - def test_deterministic(self): - data = b"blahblah" - secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16) - - priv = SigningKey.from_secret_exponent( - secexp, SECP256k1, hashlib.sha256 - ) - pub = priv.get_verifying_key() - - k = rfc6979.generate_k( - SECP256k1.generator.order(), - secexp, - hashlib.sha256, - hashlib.sha256(data).digest(), - ) - - sig1 = priv.sign(data, k=k) - self.assertTrue(pub.verify(sig1, data)) - - sig2 = priv.sign(data, k=k) - self.assertTrue(pub.verify(sig2, data)) - - sig3 = priv.sign_deterministic(data, hashlib.sha256) - self.assertTrue(pub.verify(sig3, data)) - - self.assertEqual(sig1, sig2) - self.assertEqual(sig1, sig3) - - def test_bad_usage(self): - # sk=SigningKey() is wrong - self.assertRaises(TypeError, SigningKey) - self.assertRaises(TypeError, VerifyingKey) - - def test_lengths_default(self): - default = NIST192p - priv = SigningKey.generate() - pub = priv.get_verifying_key() - self.assertEqual(len(pub.to_string()), default.verifying_key_length) - sig = priv.sign(b"data") - self.assertEqual(len(sig), default.signature_length) - - def test_serialize(self): - seed = b"secret" - curve = NIST192p - secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order) - secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order) - self.assertEqual(secexp1, secexp2) - priv1 = SigningKey.from_secret_exponent(secexp1, curve) - priv2 = SigningKey.from_secret_exponent(secexp2, curve) - self.assertEqual( - hexlify(priv1.to_string()), hexlify(priv2.to_string()) - ) - self.assertEqual(priv1.to_pem(), priv2.to_pem()) - pub1 = priv1.get_verifying_key() - pub2 = priv2.get_verifying_key() - data = b"data" - sig1 = priv1.sign(data) - sig2 = priv2.sign(data) - self.assertTrue(pub1.verify(sig1, data)) - self.assertTrue(pub2.verify(sig1, data)) - self.assertTrue(pub1.verify(sig2, data)) - self.assertTrue(pub2.verify(sig2, data)) - self.assertEqual(hexlify(pub1.to_string()), hexlify(pub2.to_string())) - - def test_nonrandom(self): - s = b"all the entropy in the entire world, compressed into one line" - - def not_much_entropy(numbytes): - return s[:numbytes] - - # we control the entropy source, these two keys should be identical: - priv1 = SigningKey.generate(entropy=not_much_entropy) - priv2 = SigningKey.generate(entropy=not_much_entropy) - self.assertEqual( - hexlify(priv1.get_verifying_key().to_string()), - hexlify(priv2.get_verifying_key().to_string()), - ) - # likewise, signatures should be identical. 
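test_deterministic above in its smallest form: an RFC 6979 nonce derived from the key and message makes signatures reproducible (a sketch assuming python-ecdsa; the secret exponent is the same example value the test uses).

import hashlib
from ecdsa import SigningKey, SECP256k1
from ecdsa import rfc6979

data = b"blahblah"
secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)
sk = SigningKey.from_secret_exponent(secexp, SECP256k1, hashlib.sha256)

k = rfc6979.generate_k(
    SECP256k1.generator.order(),
    secexp,
    hashlib.sha256,
    hashlib.sha256(data).digest(),
)
# an explicit k and sign_deterministic produce byte-identical signatures
assert sk.sign(data, k=k) == sk.sign_deterministic(data, hashlib.sha256)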
Obviously you'd never - # want to do this with keys you care about, because the secrecy of - # the private key depends upon using different random numbers for - # each signature - sig1 = priv1.sign(b"data", entropy=not_much_entropy) - sig2 = priv2.sign(b"data", entropy=not_much_entropy) - self.assertEqual(hexlify(sig1), hexlify(sig2)) - - def assertTruePrivkeysEqual(self, priv1, priv2): - self.assertEqual( - priv1.privkey.secret_multiplier, priv2.privkey.secret_multiplier - ) - self.assertEqual( - priv1.privkey.public_key.generator, - priv2.privkey.public_key.generator, - ) - - def test_privkey_creation(self): - s = b"all the entropy in the entire world, compressed into one line" - - def not_much_entropy(numbytes): - return s[:numbytes] - - priv1 = SigningKey.generate() - self.assertEqual(priv1.baselen, NIST192p.baselen) - - priv1 = SigningKey.generate(curve=NIST224p) - self.assertEqual(priv1.baselen, NIST224p.baselen) - - priv1 = SigningKey.generate(entropy=not_much_entropy) - self.assertEqual(priv1.baselen, NIST192p.baselen) - priv2 = SigningKey.generate(entropy=not_much_entropy) - self.assertEqual(priv2.baselen, NIST192p.baselen) - self.assertTruePrivkeysEqual(priv1, priv2) - - priv1 = SigningKey.from_secret_exponent(secexp=3) - self.assertEqual(priv1.baselen, NIST192p.baselen) - priv2 = SigningKey.from_secret_exponent(secexp=3) - self.assertTruePrivkeysEqual(priv1, priv2) - - priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p) - self.assertEqual(priv1.baselen, NIST224p.baselen) - - def test_privkey_strings(self): - priv1 = SigningKey.generate() - s1 = priv1.to_string() - self.assertEqual(type(s1), binary_type) - self.assertEqual(len(s1), NIST192p.baselen) - priv2 = SigningKey.from_string(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - s1 = priv1.to_pem() - self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) - self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) - priv2 = SigningKey.from_pem(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - s1 = priv1.to_der() - self.assertEqual(type(s1), binary_type) - priv2 = SigningKey.from_der(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - priv1 = SigningKey.generate(curve=NIST256p) - s1 = priv1.to_pem() - self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) - self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) - priv2 = SigningKey.from_pem(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - s1 = priv1.to_der() - self.assertEqual(type(s1), binary_type) - priv2 = SigningKey.from_der(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - def test_privkey_strings_brainpool(self): - priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) - s1 = priv1.to_pem() - self.assertEqual(type(s1), binary_type) - self.assertTrue(s1.startswith(b"-----BEGIN EC PRIVATE KEY-----")) - self.assertTrue(s1.strip().endswith(b"-----END EC PRIVATE KEY-----")) - priv2 = SigningKey.from_pem(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - s1 = priv1.to_der() - self.assertEqual(type(s1), binary_type) - priv2 = SigningKey.from_der(s1) - self.assertTruePrivkeysEqual(priv1, priv2) - - def assertTruePubkeysEqual(self, pub1, pub2): - self.assertEqual(pub1.pubkey.point, pub2.pubkey.point) - self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator) - self.assertEqual(pub1.curve, pub2.curve) - - def test_pubkey_strings(self): - priv1 = SigningKey.generate() - pub1 = priv1.get_verifying_key() - s1 = 
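The serialization round-trips the key tests above walk through, condensed (a sketch assuming python-ecdsa):

from ecdsa import SigningKey, VerifyingKey, NIST256p

sk = SigningKey.generate(curve=NIST256p)

pem = sk.to_pem()   # b"-----BEGIN EC PRIVATE KEY-----" ...
assert sk.to_string() == SigningKey.from_pem(pem).to_string()

der = sk.verifying_key.to_der()
vk2 = VerifyingKey.from_der(der)
assert vk2.pubkey.point == sk.verifying_key.pubkey.point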
pub1.to_string() - self.assertEqual(type(s1), binary_type) - self.assertEqual(len(s1), NIST192p.verifying_key_length) - pub2 = VerifyingKey.from_string(s1) - self.assertTruePubkeysEqual(pub1, pub2) - - priv1 = SigningKey.generate(curve=NIST256p) - pub1 = priv1.get_verifying_key() - s1 = pub1.to_string() - self.assertEqual(type(s1), binary_type) - self.assertEqual(len(s1), NIST256p.verifying_key_length) - pub2 = VerifyingKey.from_string(s1, curve=NIST256p) - self.assertTruePubkeysEqual(pub1, pub2) - - pub1_der = pub1.to_der() - self.assertEqual(type(pub1_der), binary_type) - pub2 = VerifyingKey.from_der(pub1_der) - self.assertTruePubkeysEqual(pub1, pub2) - - self.assertRaises( - der.UnexpectedDER, VerifyingKey.from_der, pub1_der + b"junk" - ) - badpub = VerifyingKey.from_der(pub1_der) - - class FakeGenerator: - def order(self): - return 123456789 - - class FakeCurveFp: - def p(self): - return int( - "6525534529039240705020950546962731340" - "4541085228058844382513856749047873406763" - ) - - badcurve = Curve( - "unknown", FakeCurveFp(), FakeGenerator(), (1, 2, 3, 4, 5, 6), None - ) - badpub.curve = badcurve - badder = badpub.to_der() - self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder) - - pem = pub1.to_pem() - self.assertEqual(type(pem), binary_type) - self.assertTrue(pem.startswith(b"-----BEGIN PUBLIC KEY-----"), pem) - self.assertTrue(pem.strip().endswith(b"-----END PUBLIC KEY-----"), pem) - pub2 = VerifyingKey.from_pem(pem) - self.assertTruePubkeysEqual(pub1, pub2) - - def test_pubkey_strings_brainpool(self): - priv1 = SigningKey.generate(curve=BRAINPOOLP512r1) - pub1 = priv1.get_verifying_key() - s1 = pub1.to_string() - self.assertEqual(type(s1), binary_type) - self.assertEqual(len(s1), BRAINPOOLP512r1.verifying_key_length) - pub2 = VerifyingKey.from_string(s1, curve=BRAINPOOLP512r1) - self.assertTruePubkeysEqual(pub1, pub2) - - pub1_der = pub1.to_der() - self.assertEqual(type(pub1_der), binary_type) - pub2 = VerifyingKey.from_der(pub1_der) - self.assertTruePubkeysEqual(pub1, pub2) - - def test_vk_to_der_with_invalid_point_encoding(self): - sk = SigningKey.generate() - vk = sk.verifying_key - - with self.assertRaises(ValueError): - vk.to_der("raw") - - def test_sk_to_der_with_invalid_point_encoding(self): - sk = SigningKey.generate() - - with self.assertRaises(ValueError): - sk.to_der("raw") - - def test_vk_from_der_garbage_after_curve_oid(self): - type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = ( - der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + b"garbage" - ) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\x00\xff", None) - to_decode = der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(der.UnexpectedDER): - VerifyingKey.from_der(to_decode) - - def test_vk_from_der_invalid_key_type(self): - type_oid_der = der.encode_oid(*(1, 2, 3)) - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\x00\xff", None) - to_decode = der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(der.UnexpectedDER): - VerifyingKey.from_der(to_decode) - - def test_vk_from_der_garbage_after_point_string(self): - type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\x00\xff", None) + b"garbage" - to_decode = 
der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(der.UnexpectedDER): - VerifyingKey.from_der(to_decode) - - def test_vk_from_der_invalid_bitstring(self): - type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\x08\xff", None) - to_decode = der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(der.UnexpectedDER): - VerifyingKey.from_der(to_decode) - - def test_vk_from_der_with_invalid_length_of_encoding(self): - type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\xff" * 64, 0) - to_decode = der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_der(to_decode) - - def test_vk_from_der_with_raw_encoding(self): - type_oid_der = encoded_oid_ecPublicKey - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der) - point_der = der.encode_bitstring(b"\xff" * 48, 0) - to_decode = der.encode_sequence(enc_type_der, point_der) - - with self.assertRaises(der.UnexpectedDER): - VerifyingKey.from_der(to_decode) - - def test_signature_strings(self): - priv1 = SigningKey.generate() - pub1 = priv1.get_verifying_key() - data = b"data" - - sig = priv1.sign(data) - self.assertEqual(type(sig), binary_type) - self.assertEqual(len(sig), NIST192p.signature_length) - self.assertTrue(pub1.verify(sig, data)) - - sig = priv1.sign(data, sigencode=sigencode_strings) - self.assertEqual(type(sig), tuple) - self.assertEqual(len(sig), 2) - self.assertEqual(type(sig[0]), binary_type) - self.assertEqual(type(sig[1]), binary_type) - self.assertEqual(len(sig[0]), NIST192p.baselen) - self.assertEqual(len(sig[1]), NIST192p.baselen) - self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings)) - - sig_der = priv1.sign(data, sigencode=sigencode_der) - self.assertEqual(type(sig_der), binary_type) - self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der)) - - def test_sigencode_string_canonize_no_change(self): - r = 12 - s = 400 - order = SECP112r1.order - - new_r, new_s = sigdecode_string( - sigencode_string_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(s, new_s) - - def test_sigencode_string_canonize(self): - r = 12 - order = SECP112r1.order - s = order - 10 - - new_r, new_s = sigdecode_string( - sigencode_string_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(order - s, new_s) - - def test_sigencode_strings_canonize_no_change(self): - r = 12 - s = 400 - order = SECP112r1.order - - new_r, new_s = sigdecode_strings( - sigencode_strings_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(s, new_s) - - def test_sigencode_strings_canonize(self): - r = 12 - order = SECP112r1.order - s = order - 10 - - new_r, new_s = sigdecode_strings( - sigencode_strings_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(order - s, new_s) - - def test_sigencode_der_canonize_no_change(self): - r = 13 - s = 200 - order = SECP112r1.order - - new_r, new_s = sigdecode_der( - sigencode_der_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(s, new_s) - - def test_sigencode_der_canonize(self): - r = 13 - order = 
SECP112r1.order - s = order - 14 - - new_r, new_s = sigdecode_der( - sigencode_der_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(order - s, new_s) - - def test_sigencode_der_canonize_with_close_to_half_order(self): - r = 13 - order = SECP112r1.order - s = order // 2 + 1 - - regular_encode = sigencode_der(r, s, order) - canonical_encode = sigencode_der_canonize(r, s, order) - - self.assertNotEqual(regular_encode, canonical_encode) - - new_r, new_s = sigdecode_der( - sigencode_der_canonize(r, s, order), order - ) - - self.assertEqual(r, new_r) - self.assertEqual(order - s, new_s) - - def test_sig_decode_strings_with_invalid_count(self): - with self.assertRaises(MalformedSignature): - sigdecode_strings([b"one", b"two", b"three"], 0xFF) - - def test_sig_decode_strings_with_wrong_r_len(self): - with self.assertRaises(MalformedSignature): - sigdecode_strings([b"one", b"two"], 0xFF) - - def test_sig_decode_strings_with_wrong_s_len(self): - with self.assertRaises(MalformedSignature): - sigdecode_strings([b"\xa0", b"\xb0\xff"], 0xFF) - - def test_verify_with_too_long_input(self): - sk = SigningKey.generate() - vk = sk.verifying_key - - with self.assertRaises(BadDigestError): - vk.verify_digest(None, b"\x00" * 128) - - def test_sk_from_secret_exponent_with_wrong_sec_exponent(self): - with self.assertRaises(MalformedPointError): - SigningKey.from_secret_exponent(0) - - def test_sk_from_string_with_wrong_len_string(self): - with self.assertRaises(MalformedPointError): - SigningKey.from_string(b"\x01") - - def test_sk_from_der_with_junk_after_sequence(self): - ver_der = der.encode_integer(1) - to_decode = der.encode_sequence(ver_der) + b"garbage" - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_der_with_wrong_version(self): - ver_der = der.encode_integer(0) - to_decode = der.encode_sequence(ver_der) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_der_invalid_const_tag(self): - ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b"\x00\xff") - curve_oid_der = der.encode_oid(*(1, 2, 3)) - const_der = der.encode_constructed(1, curve_oid_der) - to_decode = der.encode_sequence( - ver_der, privkey_der, const_der, curve_oid_der - ) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_der_garbage_after_privkey_oid(self): - ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b"\x00\xff") - curve_oid_der = der.encode_oid(*(1, 2, 3)) + b"garbage" - const_der = der.encode_constructed(0, curve_oid_der) - to_decode = der.encode_sequence( - ver_der, privkey_der, const_der, curve_oid_der - ) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_der_with_short_privkey(self): - ver_der = der.encode_integer(1) - privkey_der = der.encode_octet_string(b"\x00\xff") - curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) - const_der = der.encode_constructed(0, curve_oid_der) - to_decode = der.encode_sequence( - ver_der, privkey_der, const_der, curve_oid_der - ) - - sk = SigningKey.from_der(to_decode) - self.assertEqual(sk.privkey.secret_multiplier, 255) - - def test_sk_from_p8_der_with_wrong_version(self): - ver_der = der.encode_integer(2) - algorithm_der = der.encode_sequence( - der.encode_oid(1, 2, 840, 10045, 2, 1), - der.encode_oid(1, 2, 840, 10045, 3, 1, 1), - ) - privkey_der = der.encode_octet_string( - der.encode_sequence( - 
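The low-s rule the *_canonize tests above pin down, stand-alone: when s is in the high half of the order, the canonical encoders emit order - s instead (a sketch assuming python-ecdsa; r and s mirror the test's values).

from ecdsa import SECP112r1
from ecdsa.util import sigencode_der, sigencode_der_canonize, sigdecode_der

order = SECP112r1.order
r, s = 13, order - 14                 # s in the "high" half

assert sigencode_der(r, s, order) != sigencode_der_canonize(r, s, order)
new_r, new_s = sigdecode_der(sigencode_der_canonize(r, s, order), order)
assert (new_r, new_s) == (r, order - s)   # s flipped into the low half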
der.encode_integer(1), der.encode_octet_string(b"\x00\xff") - ) - ) - to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_p8_der_with_wrong_algorithm(self): - ver_der = der.encode_integer(1) - algorithm_der = der.encode_sequence( - der.encode_oid(1, 2, 3), der.encode_oid(1, 2, 840, 10045, 3, 1, 1) - ) - privkey_der = der.encode_octet_string( - der.encode_sequence( - der.encode_integer(1), der.encode_octet_string(b"\x00\xff") - ) - ) - to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_p8_der_with_trailing_junk_after_algorithm(self): - ver_der = der.encode_integer(1) - algorithm_der = der.encode_sequence( - der.encode_oid(1, 2, 840, 10045, 2, 1), - der.encode_oid(1, 2, 840, 10045, 3, 1, 1), - der.encode_octet_string(b"junk"), - ) - privkey_der = der.encode_octet_string( - der.encode_sequence( - der.encode_integer(1), der.encode_octet_string(b"\x00\xff") - ) - ) - to_decode = der.encode_sequence(ver_der, algorithm_der, privkey_der) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sk_from_p8_der_with_trailing_junk_after_key(self): - ver_der = der.encode_integer(1) - algorithm_der = der.encode_sequence( - der.encode_oid(1, 2, 840, 10045, 2, 1), - der.encode_oid(1, 2, 840, 10045, 3, 1, 1), - ) - privkey_der = der.encode_octet_string( - der.encode_sequence( - der.encode_integer(1), der.encode_octet_string(b"\x00\xff") - ) - + der.encode_integer(999) - ) - to_decode = der.encode_sequence( - ver_der, - algorithm_der, - privkey_der, - der.encode_octet_string(b"junk"), - ) - - with self.assertRaises(der.UnexpectedDER): - SigningKey.from_der(to_decode) - - def test_sign_with_too_long_hash(self): - sk = SigningKey.from_secret_exponent(12) - - with self.assertRaises(BadDigestError): - sk.sign_digest(b"\xff" * 64) - - def test_hashfunc(self): - sk = SigningKey.generate(curve=NIST256p, hashfunc=hashlib.sha256) - data = b"security level is 128 bits" - sig = sk.sign(data) - vk = VerifyingKey.from_string( - sk.get_verifying_key().to_string(), - curve=NIST256p, - hashfunc=hashlib.sha256, - ) - self.assertTrue(vk.verify(sig, data)) - - sk2 = SigningKey.generate(curve=NIST256p) - sig2 = sk2.sign(data, hashfunc=hashlib.sha256) - vk2 = VerifyingKey.from_string( - sk2.get_verifying_key().to_string(), - curve=NIST256p, - hashfunc=hashlib.sha256, - ) - self.assertTrue(vk2.verify(sig2, data)) - - vk3 = VerifyingKey.from_string( - sk.get_verifying_key().to_string(), curve=NIST256p - ) - self.assertTrue(vk3.verify(sig, data, hashfunc=hashlib.sha256)) - - def test_public_key_recovery(self): - # Create keys - curve = BRAINPOOLP160r1 - - sk = SigningKey.generate(curve=curve) - vk = sk.get_verifying_key() - - # Sign a message - data = b"blahblah" - signature = sk.sign(data) - - # Recover verifying keys - recovered_vks = VerifyingKey.from_public_key_recovery( - signature, data, curve - ) - - # Test if each pk is valid - for recovered_vk in recovered_vks: - # Test if recovered vk is valid for the data - self.assertTrue(recovered_vk.verify(signature, data)) - - # Test if properties are equal - self.assertEqual(vk.curve, recovered_vk.curve) - self.assertEqual( - vk.default_hashfunc, recovered_vk.default_hashfunc - ) - - # Test if original vk is the list of recovered keys - self.assertIn( - vk.pubkey.point, - [recovered_vk.pubkey.point for 
recovered_vk in recovered_vks], - ) - - def test_public_key_recovery_with_custom_hash(self): - # Create keys - curve = BRAINPOOLP160r1 - - sk = SigningKey.generate(curve=curve, hashfunc=hashlib.sha256) - vk = sk.get_verifying_key() - - # Sign a message - data = b"blahblah" - signature = sk.sign(data) - - # Recover verifying keys - recovered_vks = VerifyingKey.from_public_key_recovery( - signature, - data, - curve, - hashfunc=hashlib.sha256, - allow_truncate=True, - ) - - # Test if each pk is valid - for recovered_vk in recovered_vks: - # Test if recovered vk is valid for the data - self.assertTrue(recovered_vk.verify(signature, data)) - - # Test if properties are equal - self.assertEqual(vk.curve, recovered_vk.curve) - self.assertEqual(hashlib.sha256, recovered_vk.default_hashfunc) - - # Test if original vk is the list of recovered keys - self.assertIn( - vk.pubkey.point, - [recovered_vk.pubkey.point for recovered_vk in recovered_vks], - ) - - def test_encoding(self): - sk = SigningKey.from_secret_exponent(123456789) - vk = sk.verifying_key - - exp = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - self.assertEqual(vk.to_string(), exp) - self.assertEqual(vk.to_string("raw"), exp) - self.assertEqual(vk.to_string("uncompressed"), b"\x04" + exp) - self.assertEqual(vk.to_string("compressed"), b"\x02" + exp[:24]) - self.assertEqual(vk.to_string("hybrid"), b"\x06" + exp) - - def test_decoding(self): - sk = SigningKey.from_secret_exponent(123456789) - vk = sk.verifying_key - - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - - from_raw = VerifyingKey.from_string(enc) - self.assertEqual(from_raw.pubkey.point, vk.pubkey.point) - - from_uncompressed = VerifyingKey.from_string(b"\x04" + enc) - self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) - - from_compressed = VerifyingKey.from_string(b"\x02" + enc[:24]) - self.assertEqual(from_compressed.pubkey.point, vk.pubkey.point) - - from_uncompressed = VerifyingKey.from_string(b"\x06" + enc) - self.assertEqual(from_uncompressed.pubkey.point, vk.pubkey.point) - - def test_uncompressed_decoding_as_only_alowed(self): - enc = ( - b"\x04" - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - vk = VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) - sk = SigningKey.from_secret_exponent(123456789) - - self.assertEqual(vk, sk.verifying_key) - - def test_raw_decoding_with_blocked_format(self): - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - with self.assertRaises(MalformedPointError) as exp: - VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) - - self.assertIn("hybrid", str(exp.exception)) - - def test_decoding_with_unknown_format(self): - with self.assertRaises(ValueError) as e: - VerifyingKey.from_string(b"", valid_encodings=("raw", "foobar")) - - self.assertIn("Only uncompressed, compressed", str(e.exception)) - - def test_uncompressed_decoding_with_blocked_format(self): - enc = ( - b"\x04" - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - 
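Public key recovery as tested above, reduced to its essentials: from_public_key_recovery returns every candidate key consistent with (signature, message), and the signer's key is among them (a sketch assuming python-ecdsa).

from ecdsa import SigningKey, VerifyingKey, BRAINPOOLP160r1

sk = SigningKey.generate(curve=BRAINPOOLP160r1)
vk = sk.get_verifying_key()
data = b"blahblah"
sig = sk.sign(data)

candidates = VerifyingKey.from_public_key_recovery(sig, data, BRAINPOOLP160r1)
assert vk.pubkey.point in [c.pubkey.point for c in candidates]
for c in candidates:
    assert c.verify(sig, data)   # every candidate verifies this signature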
b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - with self.assertRaises(MalformedPointError) as exp: - VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) - - self.assertIn("Invalid X9.62 encoding", str(exp.exception)) - - def test_hybrid_decoding_with_blocked_format(self): - enc = ( - b"\x06" - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - with self.assertRaises(MalformedPointError) as exp: - VerifyingKey.from_string(enc, valid_encodings=("uncompressed",)) - - self.assertIn("Invalid X9.62 encoding", str(exp.exception)) - - def test_hybrid_decoding_with_inconsistent_encoding_and_no_validation( - self, - ): - sk = SigningKey.from_secret_exponent(123456789) - vk = sk.verifying_key - - enc = vk.to_string("hybrid") - self.assertEqual(enc[:1], b"\x06") - enc = b"\x07" + enc[1:] - - b = VerifyingKey.from_string( - enc, valid_encodings=("hybrid",), validate_point=False - ) - - self.assertEqual(vk, b) - - def test_compressed_decoding_with_blocked_format(self): - enc = ( - b"\x02" - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - )[:25] - with self.assertRaises(MalformedPointError) as exp: - VerifyingKey.from_string(enc, valid_encodings=("hybrid", "raw")) - - self.assertIn("(hybrid, raw)", str(exp.exception)) - - def test_decoding_with_malformed_uncompressed(self): - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x02" + enc) - - def test_decoding_with_malformed_compressed(self): - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x01" + enc[:24]) - - def test_decoding_with_inconsistent_hybrid(self): - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x07" + enc) - - def test_decoding_with_inconsistent_hybrid_odd_point(self): - sk = SigningKey.from_secret_exponent(123456791) - vk = sk.verifying_key - - enc = vk.to_string("hybrid") - self.assertEqual(enc[:1], b"\x07") - enc = b"\x06" + enc[1:] - - with self.assertRaises(MalformedPointError): - b = VerifyingKey.from_string(enc, valid_encodings=("hybrid",)) - - def test_decoding_with_point_not_on_curve(self): - enc = ( - b"\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3" - b"\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4" - b"z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*" - ) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(enc[:47] + b"\x00") - - def test_decoding_with_point_at_infinity(self): - # decoding it is unsupported, as it's not necessary to encode it - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x00") - - def test_not_lying_on_curve(self): - enc = number_to_string(NIST192p.curve.p(), NIST192p.curve.p() + 1) - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x02" + enc) - - def 
test_from_string_with_invalid_curve_too_short_ver_key_len(self): - # both verifying_key_length and baselen are calculated internally - # by the Curve constructor, but since we depend on them verify - # that inconsistent values are detected - curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) - curve.verifying_key_length = 16 - curve.baselen = 32 - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x00" * 16, curve) - - def test_from_string_with_invalid_curve_too_long_ver_key_len(self): - # both verifying_key_length and baselen are calculated internally - # by the Curve constructor, but since we depend on them verify - # that inconsistent values are detected - curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2)) - curve.verifying_key_length = 16 - curve.baselen = 16 - - with self.assertRaises(MalformedPointError): - VerifyingKey.from_string(b"\x00" * 16, curve) - - -@pytest.mark.parametrize( - "val,even", [(i, j) for i in range(256) for j in [True, False]] -) -def test_VerifyingKey_decode_with_small_values(val, even): - enc = number_to_string(val, NIST192p.order) - - if even: - enc = b"\x02" + enc - else: - enc = b"\x03" + enc - - # small values can both be actual valid public keys and not, verify that - # only expected exceptions are raised if they are not - try: - vk = VerifyingKey.from_string(enc) - assert isinstance(vk, VerifyingKey) - except MalformedPointError: - assert True - - -params = [] -for curve in curves: - for enc in ["raw", "uncompressed", "compressed", "hybrid"]: - params.append( - pytest.param(curve, enc, id="{0}-{1}".format(curve.name, enc)) - ) - - -@pytest.mark.parametrize("curve,encoding", params) -def test_VerifyingKey_encode_decode(curve, encoding): - sk = SigningKey.generate(curve=curve) - vk = sk.verifying_key - - encoded = vk.to_string(encoding) - - from_enc = VerifyingKey.from_string(encoded, curve=curve) - - assert vk.pubkey.point == from_enc.pubkey.point - - -if "--fast" in sys.argv: # pragma: no cover - params = [NIST192p, BRAINPOOLP160r1] -else: - params = curves - - -@pytest.mark.parametrize("curve", params) -def test_lengths(curve): - priv = SigningKey.generate(curve=curve) - pub1 = priv.get_verifying_key() - pub2 = VerifyingKey.from_string(pub1.to_string(), curve) - assert pub1.to_string() == pub2.to_string() - assert len(pub1.to_string()) == curve.verifying_key_length - sig = priv.sign(b"data") - assert len(sig) == curve.signature_length - - -@pytest.mark.slow -class OpenSSL(unittest.TestCase): - # test interoperability with OpenSSL tools. Note that openssl's ECDSA - # sign/verify arguments changed between 0.9.8 and 1.0.0: the early - # versions require "-ecdsa-with-SHA1", the later versions want just - # "-SHA1" (or to leave out that argument entirely, which means the - # signature will use some default digest algorithm, probably determined - # by the key, probably always SHA1). - # - # openssl ecparam -name secp224r1 -genkey -out privkey.pem - # openssl ec -in privkey.pem -text -noout # get the priv/pub keys - # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt - # openssl asn1parse -in data.sig -inform DER - # data.sig is 64 bytes, probably 56b plus ASN1 overhead - # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $? 
- # openssl ec -in privkey.pem -pubout -out pubkey.pem - # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der - - OPENSSL_SUPPORTED_CURVES = set( - c.split(":")[0].strip() - for c in run_openssl("ecparam -list_curves").split("\n") - ) - - def get_openssl_messagedigest_arg(self, hash_name): - v = run_openssl("version") - # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010", - # or "OpenSSL 0.9.8o 01 Jun 2010" - vs = v.split()[1].split(".") - if vs >= ["1", "0", "0"]: # pragma: no cover - return "-{0}".format(hash_name) - else: # pragma: no cover - return "-ecdsa-with-{0}".format(hash_name) - - # sk: 1:OpenSSL->python 2:python->OpenSSL - # vk: 3:OpenSSL->python 4:python->OpenSSL - # sig: 5:OpenSSL->python 6:python->OpenSSL - - @pytest.mark.slow - @pytest.mark.skipif( - "secp112r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp112r1", - ) - def test_from_openssl_secp112r1(self): - return self.do_test_from_openssl(SECP112r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp112r2" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp112r2", - ) - def test_from_openssl_secp112r2(self): - return self.do_test_from_openssl(SECP112r2) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp128r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp128r1", - ) - def test_from_openssl_secp128r1(self): - return self.do_test_from_openssl(SECP128r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp160r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp160r1", - ) - def test_from_openssl_secp160r1(self): - return self.do_test_from_openssl(SECP160r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_from_openssl_nist192p(self): - return self.do_test_from_openssl(NIST192p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_from_openssl_nist192p_sha256(self): - return self.do_test_from_openssl(NIST192p, "SHA256") - - @pytest.mark.slow - @pytest.mark.skipif( - "secp224r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp224r1", - ) - def test_from_openssl_nist224p(self): - return self.do_test_from_openssl(NIST224p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_from_openssl_nist256p(self): - return self.do_test_from_openssl(NIST256p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_from_openssl_nist256p_sha384(self): - return self.do_test_from_openssl(NIST256p, "SHA384") - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_from_openssl_nist256p_sha512(self): - return self.do_test_from_openssl(NIST256p, "SHA512") - - @pytest.mark.slow - @pytest.mark.skipif( - "secp384r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp384r1", - ) - def test_from_openssl_nist384p(self): - return self.do_test_from_openssl(NIST384p) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp521r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support 
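How the OPENSSL_SUPPORTED_CURVES probe above works on its own (a sketch; it assumes an openssl binary on PATH and mirrors the parse used in the class body):

import subprocess

out = subprocess.run(
    ["openssl", "ecparam", "-list_curves"],
    stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=True,
).stdout.decode()

# lines look like "  prime256v1: X9.62/SECG curve over a 256 bit prime field"
supported = set(line.split(":")[0].strip() for line in out.split("\n"))
print("prime256v1" in supported)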
secp521r1", - ) - def test_from_openssl_nist521p(self): - return self.do_test_from_openssl(NIST521p) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp256k1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp256k1", - ) - def test_from_openssl_secp256k1(self): - return self.do_test_from_openssl(SECP256k1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP160r1", - ) - def test_from_openssl_brainpoolp160r1(self): - return self.do_test_from_openssl(BRAINPOOLP160r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP192r1", - ) - def test_from_openssl_brainpoolp192r1(self): - return self.do_test_from_openssl(BRAINPOOLP192r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP224r1", - ) - def test_from_openssl_brainpoolp224r1(self): - return self.do_test_from_openssl(BRAINPOOLP224r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP256r1", - ) - def test_from_openssl_brainpoolp256r1(self): - return self.do_test_from_openssl(BRAINPOOLP256r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP320r1", - ) - def test_from_openssl_brainpoolp320r1(self): - return self.do_test_from_openssl(BRAINPOOLP320r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP384r1", - ) - def test_from_openssl_brainpoolp384r1(self): - return self.do_test_from_openssl(BRAINPOOLP384r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP512r1", - ) - def test_from_openssl_brainpoolp512r1(self): - return self.do_test_from_openssl(BRAINPOOLP512r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP160t1", - ) - def test_from_openssl_brainpoolp160t1(self): - return self.do_test_from_openssl(BRAINPOOLP160t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP192t1", - ) - def test_from_openssl_brainpoolp192t1(self): - return self.do_test_from_openssl(BRAINPOOLP192t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP224t1", - ) - def test_from_openssl_brainpoolp224t1(self): - return self.do_test_from_openssl(BRAINPOOLP224t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP256t1", - ) - def test_from_openssl_brainpoolp256t1(self): - return self.do_test_from_openssl(BRAINPOOLP256t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP320t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP320t1", - ) - def test_from_openssl_brainpoolp320t1(self): - return self.do_test_from_openssl(BRAINPOOLP320t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP384t1" not in 
OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP384t1", - ) - def test_from_openssl_brainpoolp384t1(self): - return self.do_test_from_openssl(BRAINPOOLP384t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP512t1", - ) - def test_from_openssl_brainpoolp512t1(self): - return self.do_test_from_openssl(BRAINPOOLP512t1) - - def do_test_from_openssl(self, curve, hash_name="SHA1"): - curvename = curve.openssl_name - assert curvename - # OpenSSL: create sk, vk, sign. - # Python: read vk(3), checksig(5), read sk(1), sign, check - mdarg = self.get_openssl_messagedigest_arg(hash_name) - if os.path.isdir("t"): # pragma: no cover - shutil.rmtree("t") - os.mkdir("t") - run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename) - run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem") - data = b"data" - with open("t/data.txt", "wb") as e: - e.write(data) - run_openssl( - "dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg - ) - run_openssl( - "dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" - % mdarg - ) - with open("t/pubkey.pem", "rb") as e: - pubkey_pem = e.read() - vk = VerifyingKey.from_pem(pubkey_pem) # 3 - with open("t/data.sig", "rb") as e: - sig_der = e.read() - self.assertTrue( - vk.verify( - sig_der, - data, # 5 - hashfunc=partial(hashlib.new, hash_name), - sigdecode=sigdecode_der, - ) - ) - - with open("t/privkey.pem") as e: - fp = e.read() - sk = SigningKey.from_pem(fp) # 1 - sig = sk.sign(data, hashfunc=partial(hashlib.new, hash_name)) - self.assertTrue( - vk.verify(sig, data, hashfunc=partial(hashlib.new, hash_name)) - ) - - run_openssl( - "pkcs8 -topk8 -nocrypt " - "-in t/privkey.pem -outform pem -out t/privkey-p8.pem" - ) - with open("t/privkey-p8.pem", "rb") as e: - privkey_p8_pem = e.read() - sk_from_p8 = SigningKey.from_pem(privkey_p8_pem) - self.assertEqual(sk, sk_from_p8) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp112r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp112r1", - ) - def test_to_openssl_secp112r1(self): - self.do_test_to_openssl(SECP112r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp112r2" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp112r2", - ) - def test_to_openssl_secp112r2(self): - self.do_test_to_openssl(SECP112r2) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp128r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp128r1", - ) - def test_to_openssl_secp128r1(self): - self.do_test_to_openssl(SECP128r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp160r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp160r1", - ) - def test_to_openssl_secp160r1(self): - self.do_test_to_openssl(SECP160r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_to_openssl_nist192p(self): - self.do_test_to_openssl(NIST192p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_to_openssl_nist192p_sha256(self): - self.do_test_to_openssl(NIST192p, "SHA256") - - @pytest.mark.slow - @pytest.mark.skipif( - "secp224r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp224r1", - ) - def test_to_openssl_nist224p(self): - 
self.do_test_to_openssl(NIST224p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_to_openssl_nist256p(self): - self.do_test_to_openssl(NIST256p) - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_to_openssl_nist256p_sha384(self): - self.do_test_to_openssl(NIST256p, "SHA384") - - @pytest.mark.slow - @pytest.mark.skipif( - "prime256v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime256v1", - ) - def test_to_openssl_nist256p_sha512(self): - self.do_test_to_openssl(NIST256p, "SHA512") - - @pytest.mark.slow - @pytest.mark.skipif( - "secp384r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp384r1", - ) - def test_to_openssl_nist384p(self): - self.do_test_to_openssl(NIST384p) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp521r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp521r1", - ) - def test_to_openssl_nist521p(self): - self.do_test_to_openssl(NIST521p) - - @pytest.mark.slow - @pytest.mark.skipif( - "secp256k1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support secp256k1", - ) - def test_to_openssl_secp256k1(self): - self.do_test_to_openssl(SECP256k1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP160r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP160r1", - ) - def test_to_openssl_brainpoolp160r1(self): - self.do_test_to_openssl(BRAINPOOLP160r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP192r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP192r1", - ) - def test_to_openssl_brainpoolp192r1(self): - self.do_test_to_openssl(BRAINPOOLP192r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP224r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP224r1", - ) - def test_to_openssl_brainpoolp224r1(self): - self.do_test_to_openssl(BRAINPOOLP224r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP256r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP256r1", - ) - def test_to_openssl_brainpoolp256r1(self): - self.do_test_to_openssl(BRAINPOOLP256r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP320r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP320r1", - ) - def test_to_openssl_brainpoolp320r1(self): - self.do_test_to_openssl(BRAINPOOLP320r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP384r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP384r1", - ) - def test_to_openssl_brainpoolp384r1(self): - self.do_test_to_openssl(BRAINPOOLP384r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP512r1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP512r1", - ) - def test_to_openssl_brainpoolp512r1(self): - self.do_test_to_openssl(BRAINPOOLP512r1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP160t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP160t1", - ) - def test_to_openssl_brainpoolp160t1(self): - self.do_test_to_openssl(BRAINPOOLP160t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP192t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not 
support brainpoolP192t1", - ) - def test_to_openssl_brainpoolp192t1(self): - self.do_test_to_openssl(BRAINPOOLP192t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP224t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP224t1", - ) - def test_to_openssl_brainpoolp224t1(self): - self.do_test_to_openssl(BRAINPOOLP224t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP256t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP256t1", - ) - def test_to_openssl_brainpoolp256t1(self): - self.do_test_to_openssl(BRAINPOOLP256t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP320t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP320t1", - ) - def test_to_openssl_brainpoolp320t1(self): - self.do_test_to_openssl(BRAINPOOLP320t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP384t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP384t1", - ) - def test_to_openssl_brainpoolp384t1(self): - self.do_test_to_openssl(BRAINPOOLP384t1) - - @pytest.mark.slow - @pytest.mark.skipif( - "brainpoolP512t1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support brainpoolP512t1", - ) - def test_to_openssl_brainpoolp512t1(self): - self.do_test_to_openssl(BRAINPOOLP512t1) - - def do_test_to_openssl(self, curve, hash_name="SHA1"): - curvename = curve.openssl_name - assert curvename - # Python: create sk, vk, sign. - # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check - mdarg = self.get_openssl_messagedigest_arg(hash_name) - if os.path.isdir("t"): # pragma: no cover - shutil.rmtree("t") - os.mkdir("t") - sk = SigningKey.generate(curve=curve) - vk = sk.get_verifying_key() - data = b"data" - with open("t/pubkey.der", "wb") as e: - e.write(vk.to_der()) # 4 - with open("t/pubkey.pem", "wb") as e: - e.write(vk.to_pem()) # 4 - sig_der = sk.sign( - data, - hashfunc=partial(hashlib.new, hash_name), - sigencode=sigencode_der, - ) - - with open("t/data.sig", "wb") as e: - e.write(sig_der) # 6 - with open("t/data.txt", "wb") as e: - e.write(data) - with open("t/baddata.txt", "wb") as e: - e.write(data + b"corrupt") - - self.assertRaises( - SubprocessError, - run_openssl, - "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" - % mdarg, - ) - run_openssl( - "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" - % mdarg - ) - - with open("t/privkey.pem", "wb") as e: - e.write(sk.to_pem()) # 2 - run_openssl( - "dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg - ) - run_openssl( - "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" - % mdarg - ) - - with open("t/privkey-explicit.pem", "wb") as e: - e.write(sk.to_pem(curve_parameters_encoding="explicit")) - run_openssl( - "dgst %s -sign t/privkey-explicit.pem -out t/data.sig2 t/data.txt" - % mdarg - ) - run_openssl( - "dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" - % mdarg - ) - - with open("t/privkey-p8.pem", "wb") as e: - e.write(sk.to_pem(format="pkcs8")) - run_openssl( - "dgst %s -sign t/privkey-p8.pem -out t/data.sig3 t/data.txt" - % mdarg - ) - run_openssl( - "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" - % mdarg - ) - - with open("t/privkey-p8-explicit.pem", "wb") as e: - e.write( - sk.to_pem(format="pkcs8", curve_parameters_encoding="explicit") - ) - run_openssl( - "dgst %s -sign t/privkey-p8-explicit.pem -out t/data.sig3 t/data.txt" - % mdarg - ) - 
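[Editor's aside: `do_test_to_openssl` drives the interop check by writing keys and signatures to files and shelling out to the `openssl` CLI at each step. Below is a minimal standalone sketch of the same round trip (sign with python-ecdsa, verify with OpenSSL), assuming only that an `openssl` binary is on PATH; it is illustrative and not part of the vendored test file.]

```python
# Illustrative sketch (not from the test suite): sign with python-ecdsa,
# then verify the DER signature with the system `openssl` CLI.
import hashlib
import os
import subprocess
import tempfile

from ecdsa import NIST256p, SigningKey
from ecdsa.util import sigencode_der

sk = SigningKey.generate(curve=NIST256p)
workdir = tempfile.mkdtemp()
data = b"data"

with open(os.path.join(workdir, "data.txt"), "wb") as f:
    f.write(data)
with open(os.path.join(workdir, "pubkey.pem"), "wb") as f:
    f.write(sk.get_verifying_key().to_pem())
with open(os.path.join(workdir, "data.sig"), "wb") as f:
    # `openssl dgst -verify` expects a DER-encoded ECDSA signature
    f.write(sk.sign(data, hashfunc=hashlib.sha256, sigencode=sigencode_der))

subprocess.check_call(
    [
        "openssl", "dgst", "-sha256",
        "-verify", os.path.join(workdir, "pubkey.pem"),
        "-signature", os.path.join(workdir, "data.sig"),
        os.path.join(workdir, "data.txt"),
    ]
)  # raises CalledProcessError if OpenSSL rejects the signature
```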
run_openssl( - "dgst %s -verify t/pubkey.pem -signature t/data.sig3 t/data.txt" - % mdarg - ) - - OPENSSL_SUPPORTED_TYPES = set() - try: - if "-rawin" in run_openssl("pkeyutl -help"): - OPENSSL_SUPPORTED_TYPES = set( # pragma: no branch - c.lower() - for c in ("ED25519", "ED448") - if c in run_openssl("list -public-key-methods") - ) - except SubprocessError: # pragma: no cover - pass - - def do_eddsa_test_to_openssl(self, curve): - if os.path.isdir("t"): - shutil.rmtree("t") - os.mkdir("t") - - sk = SigningKey.generate(curve=curve) - vk = sk.get_verifying_key() - - data = b"data" - with open("t/pubkey.der", "wb") as e: - e.write(vk.to_der()) - with open("t/pubkey.pem", "wb") as e: - e.write(vk.to_pem()) - - sig = sk.sign(data) - - with open("t/data.sig", "wb") as e: - e.write(sig) - with open("t/data.txt", "wb") as e: - e.write(data) - with open("t/baddata.txt", "wb") as e: - e.write(data + b"corrupt") - - with self.assertRaises(SubprocessError): - run_openssl( - "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " - "-in t/baddata.txt -sigfile t/data.sig" - ) - run_openssl( - "pkeyutl -verify -pubin -inkey t/pubkey.pem -rawin " - "-in t/data.txt -sigfile t/data.sig" - ) - - shutil.rmtree("t") - - # in practice at least OpenSSL 3.0.0 is needed to make EdDSA signatures - # earlier versions support EdDSA only in X.509 certificates - @pytest.mark.slow - @pytest.mark.skipif( - "ed25519" not in OPENSSL_SUPPORTED_TYPES, - reason="system openssl does not support signing with Ed25519", - ) - def test_to_openssl_ed25519(self): - return self.do_eddsa_test_to_openssl(Ed25519) - - @pytest.mark.slow - @pytest.mark.skipif( - "ed448" not in OPENSSL_SUPPORTED_TYPES, - reason="system openssl does not support signing with Ed448", - ) - def test_to_openssl_ed448(self): - return self.do_eddsa_test_to_openssl(Ed448) - - def do_eddsa_test_from_openssl(self, curve): - curvename = curve.name - - if os.path.isdir("t"): - shutil.rmtree("t") - os.mkdir("t") - - data = b"data" - - run_openssl( - "genpkey -algorithm {0} -outform PEM -out t/privkey.pem".format( - curvename - ) - ) - run_openssl( - "pkey -outform PEM -pubout -in t/privkey.pem -out t/pubkey.pem" - ) - - with open("t/data.txt", "wb") as e: - e.write(data) - run_openssl( - "pkeyutl -sign -inkey t/privkey.pem " - "-rawin -in t/data.txt -out t/data.sig" - ) - - with open("t/data.sig", "rb") as e: - sig = e.read() - with open("t/pubkey.pem", "rb") as e: - vk = VerifyingKey.from_pem(e.read()) - - self.assertIs(vk.curve, curve) - - vk.verify(sig, data) - - shutil.rmtree("t") - - @pytest.mark.slow - @pytest.mark.skipif( - "ed25519" not in OPENSSL_SUPPORTED_TYPES, - reason="system openssl does not support signing with Ed25519", - ) - def test_from_openssl_ed25519(self): - return self.do_eddsa_test_from_openssl(Ed25519) - - @pytest.mark.slow - @pytest.mark.skipif( - "ed448" not in OPENSSL_SUPPORTED_TYPES, - reason="system openssl does not support signing with Ed448", - ) - def test_from_openssl_ed448(self): - return self.do_eddsa_test_from_openssl(Ed448) - - -class TooSmallCurve(unittest.TestCase): - OPENSSL_SUPPORTED_CURVES = set( - c.split(":")[0].strip() - for c in run_openssl("ecparam -list_curves").split("\n") - ) - - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_sign_too_small_curve_dont_allow_truncate_raises(self): - sk = SigningKey.generate(curve=NIST192p) - data = b"data" - with self.assertRaises(BadDigestError): - sk.sign( - data, - hashfunc=partial(hashlib.new, 
"SHA256"), - sigencode=sigencode_der, - allow_truncate=False, - ) - - @pytest.mark.skipif( - "prime192v1" not in OPENSSL_SUPPORTED_CURVES, - reason="system openssl does not support prime192v1", - ) - def test_verify_too_small_curve_dont_allow_truncate_raises(self): - sk = SigningKey.generate(curve=NIST192p) - vk = sk.get_verifying_key() - data = b"data" - sig_der = sk.sign( - data, - hashfunc=partial(hashlib.new, "SHA256"), - sigencode=sigencode_der, - allow_truncate=True, - ) - with self.assertRaises(BadDigestError): - vk.verify( - sig_der, - data, - hashfunc=partial(hashlib.new, "SHA256"), - sigdecode=sigdecode_der, - allow_truncate=False, - ) - - -class DER(unittest.TestCase): - def test_integer(self): - self.assertEqual(der.encode_integer(0), b"\x02\x01\x00") - self.assertEqual(der.encode_integer(1), b"\x02\x01\x01") - self.assertEqual(der.encode_integer(127), b"\x02\x01\x7f") - self.assertEqual(der.encode_integer(128), b"\x02\x02\x00\x80") - self.assertEqual(der.encode_integer(256), b"\x02\x02\x01\x00") - # self.assertEqual(der.encode_integer(-1), b"\x02\x01\xff") - - def s(n): - return der.remove_integer(der.encode_integer(n) + b"junk") - - self.assertEqual(s(0), (0, b"junk")) - self.assertEqual(s(1), (1, b"junk")) - self.assertEqual(s(127), (127, b"junk")) - self.assertEqual(s(128), (128, b"junk")) - self.assertEqual(s(256), (256, b"junk")) - self.assertEqual( - s(1234567890123456789012345678901234567890), - (1234567890123456789012345678901234567890, b"junk"), - ) - - def test_number(self): - self.assertEqual(der.encode_number(0), b"\x00") - self.assertEqual(der.encode_number(127), b"\x7f") - self.assertEqual(der.encode_number(128), b"\x81\x00") - self.assertEqual(der.encode_number(3 * 128 + 7), b"\x83\x07") - # self.assertEqual(der.read_number("\x81\x9b" + "more"), (155, 2)) - # self.assertEqual(der.encode_number(155), b"\x81\x9b") - for n in (0, 1, 2, 127, 128, 3 * 128 + 7, 840, 10045): # , 155): - x = der.encode_number(n) + b"more" - n1, llen = der.read_number(x) - self.assertEqual(n1, n) - self.assertEqual(x[llen:], b"more") - - def test_length(self): - self.assertEqual(der.encode_length(0), b"\x00") - self.assertEqual(der.encode_length(127), b"\x7f") - self.assertEqual(der.encode_length(128), b"\x81\x80") - self.assertEqual(der.encode_length(255), b"\x81\xff") - self.assertEqual(der.encode_length(256), b"\x82\x01\x00") - self.assertEqual(der.encode_length(3 * 256 + 7), b"\x82\x03\x07") - self.assertEqual(der.read_length(b"\x81\x9b" + b"more"), (155, 2)) - self.assertEqual(der.encode_length(155), b"\x81\x9b") - for n in (0, 1, 2, 127, 128, 255, 256, 3 * 256 + 7, 155): - x = der.encode_length(n) + b"more" - n1, llen = der.read_length(x) - self.assertEqual(n1, n) - self.assertEqual(x[llen:], b"more") - - def test_sequence(self): - x = der.encode_sequence(b"ABC", b"DEF") + b"GHI" - self.assertEqual(x, b"\x30\x06ABCDEFGHI") - x1, rest = der.remove_sequence(x) - self.assertEqual(x1, b"ABCDEF") - self.assertEqual(rest, b"GHI") - - def test_constructed(self): - x = der.encode_constructed(0, NIST224p.encoded_oid) - self.assertEqual(hexlify(x), b"a007" + b"06052b81040021") - x = der.encode_constructed(1, unhexlify(b"0102030a0b0c")) - self.assertEqual(hexlify(x), b"a106" + b"0102030a0b0c") - - -class Util(unittest.TestCase): - @pytest.mark.slow - def test_trytryagain(self): - tta = util.randrange_from_seed__trytryagain - for i in range(1000): - seed = "seed-%d" % i - for order in ( - 2**8 - 2, - 2**8 - 1, - 2**8, - 2**8 + 1, - 2**8 + 2, - 2**16 - 1, - 2**16 + 1, - ): - n = tta(seed, 
order) - self.assertTrue(1 <= n < order, (1, n, order)) - # this trytryagain *does* provide long-term stability - self.assertEqual( - ("%x" % (tta("seed", NIST224p.order))).encode(), - b"6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc", - ) - - def test_trytryagain_single(self): - tta = util.randrange_from_seed__trytryagain - order = 2**8 - 2 - seed = b"text" - n = tta(seed, order) - # known issue: https://github.com/warner/python-ecdsa/issues/221 - if sys.version_info < (3, 0): # pragma: no branch - self.assertEqual(n, 228) - else: # pragma: no branch - self.assertEqual(n, 18) - - @settings(**HYP_SETTINGS) - @given(st.integers(min_value=0, max_value=10**200)) - def test_randrange(self, i): - # util.randrange does not provide long-term stability: we might - # change the algorithm in the future. - entropy = util.PRNG("seed-%d" % i) - for order in ( - 2**8 - 2, - 2**8 - 1, - 2**8, - 2**16 - 1, - 2**16 + 1, - ): - # that oddball 2**16+1 takes half our runtime - n = util.randrange(order, entropy=entropy) - self.assertTrue(1 <= n < order, (1, n, order)) - - def OFF_test_prove_uniformity(self): # pragma: no cover - order = 2**8 - 2 - counts = dict([(i, 0) for i in range(1, order)]) - assert 0 not in counts - assert order not in counts - for i in range(1000000): - seed = "seed-%d" % i - n = util.randrange_from_seed__trytryagain(seed, order) - counts[n] += 1 - # this technique should use the full range - self.assertTrue(counts[order - 1]) - for i in range(1, order): - print("%3d: %s" % (i, "*" * (counts[i] // 100))) - - -class RFC6979(unittest.TestCase): - # https://tools.ietf.org/html/rfc6979#appendix-A.1 - def _do(self, generator, secexp, hsh, hash_func, expected): - actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh) - self.assertEqual(expected, actual) - - def test_SECP256k1(self): - """RFC doesn't contain test vectors for SECP256k1 used in bitcoin. - This vector has been computed by Golang reference implementation instead.""" - self._do( - generator=SECP256k1.generator, - secexp=int("9d0219792467d7d37b4d43298a7d0c05", 16), - hsh=hashlib.sha256(b"sample").digest(), - hash_func=hashlib.sha256, - expected=int( - "8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", - 16, - ), - ) - - def test_SECP256k1_2(self): - self._do( - generator=SECP256k1.generator, - secexp=int( - "cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", - 16, - ), - hsh=hashlib.sha256(b"sample").digest(), - hash_func=hashlib.sha256, - expected=int( - "2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", - 16, - ), - ) - - def test_SECP256k1_3(self): - self._do( - generator=SECP256k1.generator, - secexp=0x1, - hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), - hash_func=hashlib.sha256, - expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15, - ) - - def test_SECP256k1_4(self): - self._do( - generator=SECP256k1.generator, - secexp=0x1, - hsh=hashlib.sha256( - b"All those moments will be lost in time, like tears in rain. Time to die..." 
- ).digest(), - hash_func=hashlib.sha256, - expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3, - ) - - def test_SECP256k1_5(self): - self._do( - generator=SECP256k1.generator, - secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140, - hsh=hashlib.sha256(b"Satoshi Nakamoto").digest(), - hash_func=hashlib.sha256, - expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90, - ) - - def test_SECP256k1_6(self): - self._do( - generator=SECP256k1.generator, - secexp=0xF8B8AF8CE3C7CCA5E300D33939540C10D45CE001B8F252BFBC57BA0342904181, - hsh=hashlib.sha256(b"Alan Turing").digest(), - hash_func=hashlib.sha256, - expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1, - ) - - def test_1(self): - # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979 - self._do( - generator=Point( - None, - 0, - 0, - int("4000000000000000000020108A2E0CC0D99F8A5EF", 16), - ), - secexp=int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16), - hsh=unhexlify( - b"AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF" - ), - hash_func=hashlib.sha256, - expected=int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16), - ) - - def test_2(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha1(b"sample").digest(), - hash_func=hashlib.sha1, - expected=int( - "37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16 - ), - ) - - def test_3(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha256(b"sample").digest(), - hash_func=hashlib.sha256, - expected=int( - "32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16 - ), - ) - - def test_4(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha512(b"sample").digest(), - hash_func=hashlib.sha512, - expected=int( - "A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16 - ), - ) - - def test_5(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha1(b"test").digest(), - hash_func=hashlib.sha1, - expected=int( - "D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16 - ), - ) - - def test_6(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha256(b"test").digest(), - hash_func=hashlib.sha256, - expected=int( - "5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16 - ), - ) - - def test_7(self): - self._do( - generator=NIST192p.generator, - secexp=int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16), - hsh=hashlib.sha512(b"test").digest(), - hash_func=hashlib.sha512, - expected=int( - "0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16 - ), - ) - - def test_8(self): - self._do( - generator=NIST521p.generator, - secexp=int( - "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", - 16, - ), - hsh=hashlib.sha1(b"sample").digest(), - hash_func=hashlib.sha1, - expected=int( - "089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", - 16, - ), - ) - - def test_9(self): - self._do( - generator=NIST521p.generator, - secexp=int( - 
"0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", - 16, - ), - hsh=hashlib.sha256(b"sample").digest(), - hash_func=hashlib.sha256, - expected=int( - "0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", - 16, - ), - ) - - def test_10(self): - self._do( - generator=NIST521p.generator, - secexp=int( - "0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", - 16, - ), - hsh=hashlib.sha512(b"test").digest(), - hash_func=hashlib.sha512, - expected=int( - "16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", - 16, - ), - ) - - -class ECDH(unittest.TestCase): - def _do(self, curve, generator, dA, x_qA, y_qA, dB, x_qB, y_qB, x_Z, y_Z): - qA = dA * generator - qB = dB * generator - Z = dA * qB - self.assertEqual(Point(curve, x_qA, y_qA), qA) - self.assertEqual(Point(curve, x_qB, y_qB), qB) - self.assertTrue( - (dA * qB) - == (dA * dB * generator) - == (dB * dA * generator) - == (dB * qA) - ) - self.assertEqual(Point(curve, x_Z, y_Z), Z) - - -class RFC6932(ECDH): - # https://tools.ietf.org/html/rfc6932#appendix-A.1 - - def test_brainpoolP224r1(self): - self._do( - curve=curve_brainpoolp224r1, - generator=BRAINPOOLP224r1.generator, - dA=int( - "7C4B7A2C8A4BAD1FBB7D79CC0955DB7C6A4660CA64CC4778159B495E", 16 - ), - x_qA=int( - "B104A67A6F6E85E14EC1825E1539E8ECDBBF584922367DD88C6BDCF2", 16 - ), - y_qA=int( - "46D782E7FDB5F60CD8404301AC5949C58EDB26BC68BA07695B750A94", 16 - ), - dB=int( - "63976D4AAE6CD0F6DD18DEFEF55D96569D0507C03E74D6486FFA28FB", 16 - ), - x_qB=int( - "2A97089A9296147B71B21A4B574E1278245B536F14D8C2B9D07A874E", 16 - ), - y_qB=int( - "9B900D7C77A709A797276B8CA1BA61BB95B546FC29F862E44D59D25B", 16 - ), - x_Z=int( - "312DFD98783F9FB77B9704945A73BEB6DCCBE3B65D0F967DCAB574EB", 16 - ), - y_Z=int( - "6F800811D64114B1C48C621AB3357CF93F496E4238696A2A012B3C98", 16 - ), - ) - - def test_brainpoolP256r1(self): - self._do( - curve=curve_brainpoolp256r1, - generator=BRAINPOOLP256r1.generator, - dA=int( - "041EB8B1E2BC681BCE8E39963B2E9FC415B05283313DD1A8BCC055F11AE" - "49699", - 16, - ), - x_qA=int( - "78028496B5ECAAB3C8B6C12E45DB1E02C9E4D26B4113BC4F015F60C5C" - "CC0D206", - 16, - ), - y_qA=int( - "A2AE1762A3831C1D20F03F8D1E3C0C39AFE6F09B4D44BBE80CD100987" - "B05F92B", - 16, - ), - dB=int( - "06F5240EACDB9837BC96D48274C8AA834B6C87BA9CC3EEDD81F99A16B8D" - "804D3", - 16, - ), - x_qB=int( - "8E07E219BA588916C5B06AA30A2F464C2F2ACFC1610A3BE2FB240B635" - "341F0DB", - 16, - ), - y_qB=int( - "148EA1D7D1E7E54B9555B6C9AC90629C18B63BEE5D7AA6949EBBF47B2" - "4FDE40D", - 16, - ), - x_Z=int( - "05E940915549E9F6A4A75693716E37466ABA79B4BF2919877A16DD2CC2" - "E23708", - 16, - ), - y_Z=int( - "6BC23B6702BC5A019438CEEA107DAAD8B94232FFBBC350F3B137628FE6" - "FD134C", - 16, - ), - ) - - @pytest.mark.slow - def test_brainpoolP384r1(self): - self._do( - curve=curve_brainpoolp384r1, - generator=BRAINPOOLP384r1.generator, - dA=int( - "014EC0755B78594BA47FB0A56F6173045B4331E74BA1A6F47322E70D79D" - "828D97E095884CA72B73FDABD5910DF0FA76A", - 16, - ), - x_qA=int( - "45CB26E4384DAF6FB776885307B9A38B7AD1B5C692E0C32F012533277" - "8F3B8D3F50CA358099B30DEB5EE69A95C058B4E", - 16, - ), - y_qA=int( - "8173A1C54AFFA7E781D0E1E1D12C0DC2B74F4DF58E4A4E3AF7026C5D3" - "2DC530A2CD89C859BB4B4B768497F49AB8CC859", - 16, - ), - dB=int( 
- "6B461CB79BD0EA519A87D6828815D8CE7CD9B3CAA0B5A8262CBCD550A01" - "5C90095B976F3529957506E1224A861711D54", - 16, - ), - x_qB=int( - "01BF92A92EE4BE8DED1A911125C209B03F99E3161CFCC986DC7711383" - "FC30AF9CE28CA3386D59E2C8D72CE1E7B4666E8", - 16, - ), - y_qB=int( - "3289C4A3A4FEE035E39BDB885D509D224A142FF9FBCC5CFE5CCBB3026" - "8EE47487ED8044858D31D848F7A95C635A347AC", - 16, - ), - x_Z=int( - "04CC4FF3DCCCB07AF24E0ACC529955B36D7C807772B92FCBE48F3AFE9A" - "2F370A1F98D3FA73FD0C0747C632E12F1423EC", - 16, - ), - y_Z=int( - "7F465F90BD69AFB8F828A214EB9716D66ABC59F17AF7C75EE7F1DE22AB" - "5D05085F5A01A9382D05BF72D96698FE3FF64E", - 16, - ), - ) - - @pytest.mark.slow - def test_brainpoolP512r1(self): - self._do( - curve=curve_brainpoolp512r1, - generator=BRAINPOOLP512r1.generator, - dA=int( - "636B6BE0482A6C1C41AA7AE7B245E983392DB94CECEA2660A379CFE1595" - "59E357581825391175FC195D28BAC0CF03A7841A383B95C262B98378287" - "4CCE6FE333", - 16, - ), - x_qA=int( - "0562E68B9AF7CBFD5565C6B16883B777FF11C199161ECC427A39D17EC" - "2166499389571D6A994977C56AD8252658BA8A1B72AE42F4FB7532151" - "AFC3EF0971CCDA", - 16, - ), - y_qA=int( - "A7CA2D8191E21776A89860AFBC1F582FAA308D551C1DC6133AF9F9C3C" - "AD59998D70079548140B90B1F311AFB378AA81F51B275B2BE6B7DEE97" - "8EFC7343EA642E", - 16, - ), - dB=int( - "0AF4E7F6D52EDD52907BB8DBAB3992A0BB696EC10DF11892FF205B66D38" - "1ECE72314E6A6EA079CEA06961DBA5AE6422EF2E9EE803A1F236FB96A17" - "99B86E5C8B", - 16, - ), - x_qB=int( - "5A7954E32663DFF11AE24712D87419F26B708AC2B92877D6BFEE2BFC4" - "3714D89BBDB6D24D807BBD3AEB7F0C325F862E8BADE4F74636B97EAAC" - "E739E11720D323", - 16, - ), - y_qB=int( - "96D14621A9283A1BED84DE8DD64836B2C0758B11441179DC0C54C0D49" - "A47C03807D171DD544B72CAAEF7B7CE01C7753E2CAD1A861ECA55A719" - "54EE1BA35E04BE", - 16, - ), - x_Z=int( - "1EE8321A4BBF93B9CF8921AB209850EC9B7066D1984EF08C2BB7232362" - "08AC8F1A483E79461A00E0D5F6921CE9D360502F85C812BEDEE23AC5B2" - "10E5811B191E", - 16, - ), - y_Z=int( - "2632095B7B936174B41FD2FAF369B1D18DCADEED7E410A7E251F083109" - "7C50D02CFED02607B6A2D5ADB4C0006008562208631875B58B54ECDA5A" - "4F9FE9EAABA6", - 16, - ), - ) - - -class RFC7027(ECDH): - # https://tools.ietf.org/html/rfc7027#appendix-A - - def test_brainpoolP256r1(self): - self._do( - curve=curve_brainpoolp256r1, - generator=BRAINPOOLP256r1.generator, - dA=int( - "81DB1EE100150FF2EA338D708271BE38300CB54241D79950F77B0630398" - "04F1D", - 16, - ), - x_qA=int( - "44106E913F92BC02A1705D9953A8414DB95E1AAA49E81D9E85F929A8E" - "3100BE5", - 16, - ), - y_qA=int( - "8AB4846F11CACCB73CE49CBDD120F5A900A69FD32C272223F789EF10E" - "B089BDC", - 16, - ), - dB=int( - "55E40BC41E37E3E2AD25C3C6654511FFA8474A91A0032087593852D3E7D" - "76BD3", - 16, - ), - x_qB=int( - "8D2D688C6CF93E1160AD04CC4429117DC2C41825E1E9FCA0ADDD34E6F" - "1B39F7B", - 16, - ), - y_qB=int( - "990C57520812BE512641E47034832106BC7D3E8DD0E4C7F1136D70065" - "47CEC6A", - 16, - ), - x_Z=int( - "89AFC39D41D3B327814B80940B042590F96556EC91E6AE7939BCE31F3A" - "18BF2B", - 16, - ), - y_Z=int( - "49C27868F4ECA2179BFD7D59B1E3BF34C1DBDE61AE12931648F43E5963" - "2504DE", - 16, - ), - ) - - @pytest.mark.slow - def test_brainpoolP384r1(self): - self._do( - curve=curve_brainpoolp384r1, - generator=BRAINPOOLP384r1.generator, - dA=int( - "1E20F5E048A5886F1F157C74E91BDE2B98C8B52D58E5003D57053FC4B0B" - "D65D6F15EB5D1EE1610DF870795143627D042", - 16, - ), - x_qA=int( - "68B665DD91C195800650CDD363C625F4E742E8134667B767B1B476793" - "588F885AB698C852D4A6E77A252D6380FCAF068", - 16, - ), - y_qA=int( - 
"55BC91A39C9EC01DEE36017B7D673A931236D2F1F5C83942D049E3FA2" - "0607493E0D038FF2FD30C2AB67D15C85F7FAA59", - 16, - ), - dB=int( - "032640BC6003C59260F7250C3DB58CE647F98E1260ACCE4ACDA3DD869F7" - "4E01F8BA5E0324309DB6A9831497ABAC96670", - 16, - ), - x_qB=int( - "4D44326F269A597A5B58BBA565DA5556ED7FD9A8A9EB76C25F46DB69D" - "19DC8CE6AD18E404B15738B2086DF37E71D1EB4", - 16, - ), - y_qB=int( - "62D692136DE56CBE93BF5FA3188EF58BC8A3A0EC6C1E151A21038A42E" - "9185329B5B275903D192F8D4E1F32FE9CC78C48", - 16, - ), - x_Z=int( - "0BD9D3A7EA0B3D519D09D8E48D0785FB744A6B355E6304BC51C229FBBC" - "E239BBADF6403715C35D4FB2A5444F575D4F42", - 16, - ), - y_Z=int( - "0DF213417EBE4D8E40A5F76F66C56470C489A3478D146DECF6DF0D94BA" - "E9E598157290F8756066975F1DB34B2324B7BD", - 16, - ), - ) - - @pytest.mark.slow - def test_brainpoolP512r1(self): - self._do( - curve=curve_brainpoolp512r1, - generator=BRAINPOOLP512r1.generator, - dA=int( - "16302FF0DBBB5A8D733DAB7141C1B45ACBC8715939677F6A56850A38BD8" - "7BD59B09E80279609FF333EB9D4C061231FB26F92EEB04982A5F1D1764C" - "AD57665422", - 16, - ), - x_qA=int( - "0A420517E406AAC0ACDCE90FCD71487718D3B953EFD7FBEC5F7F27E28" - "C6149999397E91E029E06457DB2D3E640668B392C2A7E737A7F0BF044" - "36D11640FD09FD", - 16, - ), - y_qA=int( - "72E6882E8DB28AAD36237CD25D580DB23783961C8DC52DFA2EC138AD4" - "72A0FCEF3887CF62B623B2A87DE5C588301EA3E5FC269B373B60724F5" - "E82A6AD147FDE7", - 16, - ), - dB=int( - "230E18E1BCC88A362FA54E4EA3902009292F7F8033624FD471B5D8ACE49" - "D12CFABBC19963DAB8E2F1EBA00BFFB29E4D72D13F2224562F405CB8050" - "3666B25429", - 16, - ), - x_qB=int( - "9D45F66DE5D67E2E6DB6E93A59CE0BB48106097FF78A081DE781CDB31" - "FCE8CCBAAEA8DD4320C4119F1E9CD437A2EAB3731FA9668AB268D871D" - "EDA55A5473199F", - 16, - ), - y_qB=int( - "2FDC313095BCDD5FB3A91636F07A959C8E86B5636A1E930E8396049CB" - "481961D365CC11453A06C719835475B12CB52FC3C383BCE35E27EF194" - "512B71876285FA", - 16, - ), - x_Z=int( - "A7927098655F1F9976FA50A9D566865DC530331846381C87256BAF3226" - "244B76D36403C024D7BBF0AA0803EAFF405D3D24F11A9B5C0BEF679FE1" - "454B21C4CD1F", - 16, - ), - y_Z=int( - "7DB71C3DEF63212841C463E881BDCF055523BD368240E6C3143BD8DEF8" - "B3B3223B95E0F53082FF5E412F4222537A43DF1C6D25729DDB51620A83" - "2BE6A26680A2", - 16, - ), - ) - - -# https://tools.ietf.org/html/rfc4754#page-5 -@pytest.mark.parametrize( - "w, gwx, gwy, k, msg, md, r, s, curve", - [ - pytest.param( - "DC51D3866A15BACDE33D96F992FCA99DA7E6EF0934E7097559C27F1614C88A7F", - "2442A5CC0ECD015FA3CA31DC8E2BBC70BF42D60CBCA20085E0822CB04235E970", - "6FC98BD7E50211A4A27102FA3549DF79EBCB4BF246B80945CDDFE7D509BBFD7D", - "9E56F509196784D963D1C0A401510EE7ADA3DCC5DEE04B154BF61AF1D5A6DECE", - b"abc", - hashlib.sha256, - "CB28E0999B9C7715FD0A80D8E47A77079716CBBF917DD72E97566EA1C066957C", - "86FA3BB4E26CAD5BF90B7F81899256CE7594BB1EA0C89212748BFF3B3D5B0315", - NIST256p, - id="ECDSA-256", - ), - pytest.param( - "0BEB646634BA87735D77AE4809A0EBEA865535DE4C1E1DCB692E84708E81A5AF" - "62E528C38B2A81B35309668D73524D9F", - "96281BF8DD5E0525CA049C048D345D3082968D10FEDF5C5ACA0C64E6465A97EA" - "5CE10C9DFEC21797415710721F437922", - "447688BA94708EB6E2E4D59F6AB6D7EDFF9301D249FE49C33096655F5D502FAD" - "3D383B91C5E7EDAA2B714CC99D5743CA", - "B4B74E44D71A13D568003D7489908D564C7761E229C58CBFA18950096EB7463B" - "854D7FA992F934D927376285E63414FA", - b"abc", - hashlib.sha384, - "FB017B914E29149432D8BAC29A514640B46F53DDAB2C69948084E2930F1C8F7E" - "08E07C9C63F2D21A07DCB56A6AF56EB3", - "B263A1305E057F984D38726A1B46874109F417BCA112674C528262A40A629AF1" - "CBB9F516CE0FA7D2FF630863A00E8B9F", - 
NIST384p, - id="ECDSA-384", - ), - pytest.param( - "0065FDA3409451DCAB0A0EAD45495112A3D813C17BFD34BDF8C1209D7DF58491" - "20597779060A7FF9D704ADF78B570FFAD6F062E95C7E0C5D5481C5B153B48B37" - "5FA1", - "0151518F1AF0F563517EDD5485190DF95A4BF57B5CBA4CF2A9A3F6474725A35F" - "7AFE0A6DDEB8BEDBCD6A197E592D40188901CECD650699C9B5E456AEA5ADD190" - "52A8", - "006F3B142EA1BFFF7E2837AD44C9E4FF6D2D34C73184BBAD90026DD5E6E85317" - "D9DF45CAD7803C6C20035B2F3FF63AFF4E1BA64D1C077577DA3F4286C58F0AEA" - "E643", - "00C1C2B305419F5A41344D7E4359933D734096F556197A9B244342B8B62F46F9" - "373778F9DE6B6497B1EF825FF24F42F9B4A4BD7382CFC3378A540B1B7F0C1B95" - "6C2F", - b"abc", - hashlib.sha512, - "0154FD3836AF92D0DCA57DD5341D3053988534FDE8318FC6AAAAB68E2E6F4339" - "B19F2F281A7E0B22C269D93CF8794A9278880ED7DBB8D9362CAEACEE54432055" - "2251", - "017705A7030290D1CEB605A9A1BB03FF9CDD521E87A696EC926C8C10C8362DF4" - "975367101F67D1CF9BCCBF2F3D239534FA509E70AAC851AE01AAC68D62F86647" - "2660", - NIST521p, - id="ECDSA-521", - ), - ], -) -def test_RFC4754_vectors(w, gwx, gwy, k, msg, md, r, s, curve): - sk = SigningKey.from_string(unhexlify(w), curve) - vk = VerifyingKey.from_string(unhexlify(gwx + gwy), curve) - assert sk.verifying_key == vk - sig = sk.sign(msg, hashfunc=md, sigencode=sigencode_strings, k=int(k, 16)) - - assert sig == (unhexlify(r), unhexlify(s)) - - assert vk.verify(sig, msg, md, sigdecode_strings) diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py b/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py deleted file mode 100644 index 0a84b9c7..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_rw_lock.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright Mateusz Kobos, (c) 2011 -# https://code.activestate.com/recipes/577803-reader-writer-lock-with-priority-for-writers/ -# released under the MIT licence - -try: - import unittest2 as unittest -except ImportError: - import unittest -import threading -import time -import copy -from ._rwlock import RWLock - - -class Writer(threading.Thread): - def __init__( - self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write - ): - """ - @param buffer_: common buffer_ shared by the readers and writers - @type buffer_: list - @type rw_lock: L{RWLock} - @param init_sleep_time: sleep time before doing any action - @type init_sleep_time: C{float} - @param sleep_time: sleep time while in critical section - @type sleep_time: C{float} - @param to_write: data that will be appended to the buffer - """ - threading.Thread.__init__(self) - self.__buffer = buffer_ - self.__rw_lock = rw_lock - self.__init_sleep_time = init_sleep_time - self.__sleep_time = sleep_time - self.__to_write = to_write - self.entry_time = None - """Time of entry to the critical section""" - self.exit_time = None - """Time of exit from the critical section""" - - def run(self): - time.sleep(self.__init_sleep_time) - self.__rw_lock.writer_acquire() - self.entry_time = time.time() - time.sleep(self.__sleep_time) - self.__buffer.append(self.__to_write) - self.exit_time = time.time() - self.__rw_lock.writer_release() - - -class Reader(threading.Thread): - def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time): - """ - @param buffer_: common buffer shared by the readers and writers - @type buffer_: list - @type rw_lock: L{RWLock} - @param init_sleep_time: sleep time before doing any action - @type init_sleep_time: C{float} - @param sleep_time: sleep time while in critical section - @type sleep_time: C{float} - """ - threading.Thread.__init__(self) - self.__buffer = buffer_ - 
self.__rw_lock = rw_lock - self.__init_sleep_time = init_sleep_time - self.__sleep_time = sleep_time - self.buffer_read = None - """a copy of the buffer read while in critical section""" - self.entry_time = None - """Time of entry to the critical section""" - self.exit_time = None - """Time of exit from the critical section""" - - def run(self): - time.sleep(self.__init_sleep_time) - self.__rw_lock.reader_acquire() - self.entry_time = time.time() - time.sleep(self.__sleep_time) - self.buffer_read = copy.deepcopy(self.__buffer) - self.exit_time = time.time() - self.__rw_lock.reader_release() - - -class RWLockTestCase(unittest.TestCase): - def test_readers_nonexclusive_access(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Reader(buffer_, rw_lock, 0, 0)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0.4, 1)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0.3)) - threads.append(Reader(buffer_, rw_lock, 0.5, 0)) - - self.__start_and_join_threads(threads) - - ## The third reader should enter after the second one but it should - ## exit before the second one exits - ## (i.e. the readers should be in the critical section - ## at the same time) - - self.assertEqual([], threads[0].buffer_read) - self.assertEqual([1], threads[2].buffer_read) - self.assertEqual([1], threads[3].buffer_read) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[2].entry_time <= threads[3].entry_time) - self.assertTrue(threads[3].exit_time < threads[2].exit_time) - - def test_writers_exclusive_access(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0.4, 1)) - threads.append(Writer(buffer_, rw_lock, 0.1, 0, 2)) - threads.append(Reader(buffer_, rw_lock, 0.2, 0)) - - self.__start_and_join_threads(threads) - - ## The second writer should wait for the first one to exit - - self.assertEqual([1, 2], threads[2].buffer_read) - self.assertTrue(threads[0].exit_time <= threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].exit_time) - - def test_writer_priority(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) - threads.append(Reader(buffer_, rw_lock, 0.1, 0.4)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0, 2)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - - self.__start_and_join_threads(threads) - - ## The second writer should go before the second and the third reader - - self.assertEqual([1], threads[1].buffer_read) - self.assertEqual([1, 2], threads[3].buffer_read) - self.assertEqual([1, 2], threads[4].buffer_read) - self.assertTrue(threads[0].exit_time < threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[2].exit_time <= threads[3].entry_time) - self.assertTrue(threads[2].exit_time <= threads[4].entry_time) - - def test_many_writers_priority(self): - (buffer_, rw_lock, threads) = self.__init_variables() - - threads.append(Writer(buffer_, rw_lock, 0, 0, 1)) - threads.append(Reader(buffer_, rw_lock, 0.1, 0.6)) - threads.append(Writer(buffer_, rw_lock, 0.2, 0.1, 2)) - threads.append(Reader(buffer_, rw_lock, 0.3, 0)) - threads.append(Reader(buffer_, rw_lock, 0.4, 0)) - threads.append(Writer(buffer_, rw_lock, 0.5, 0.1, 3)) - - self.__start_and_join_threads(threads) - - ## The two last writers should go first -- after the first reader and - ## before the second and the third reader - - 
self.assertEqual([1], threads[1].buffer_read) - self.assertEqual([1, 2, 3], threads[3].buffer_read) - self.assertEqual([1, 2, 3], threads[4].buffer_read) - self.assertTrue(threads[0].exit_time < threads[1].entry_time) - self.assertTrue(threads[1].exit_time <= threads[2].entry_time) - self.assertTrue(threads[1].exit_time <= threads[5].entry_time) - self.assertTrue(threads[2].exit_time <= threads[3].entry_time) - self.assertTrue(threads[2].exit_time <= threads[4].entry_time) - self.assertTrue(threads[5].exit_time <= threads[3].entry_time) - self.assertTrue(threads[5].exit_time <= threads[4].entry_time) - - @staticmethod - def __init_variables(): - buffer_ = [] - rw_lock = RWLock() - threads = [] - return (buffer_, rw_lock, threads) - - @staticmethod - def __start_and_join_threads(threads): - for t in threads: - t.start() - for t in threads: - t.join() diff --git a/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py b/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py deleted file mode 100644 index d30381d7..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/test_sha3.py +++ /dev/null @@ -1,111 +0,0 @@ -try: - import unittest2 as unittest -except ImportError: - import unittest -import pytest - -try: - from gmpy2 import mpz - - GMPY = True -except ImportError: # pragma: no cover - try: - from gmpy import mpz - - GMPY = True - except ImportError: - GMPY = False - -from ._sha3 import shake_256 -from ._compat import bytes_to_int, int_to_bytes - -B2I_VECTORS = [ - (b"\x00\x01", "big", 1), - (b"\x00\x01", "little", 0x0100), - (b"", "big", 0), - (b"\x00", "little", 0), -] - - -@pytest.mark.parametrize("bytes_in,endian,int_out", B2I_VECTORS) -def test_bytes_to_int(bytes_in, endian, int_out): - out = bytes_to_int(bytes_in, endian) - assert out == int_out - - -class TestBytesToInt(unittest.TestCase): - def test_bytes_to_int_wrong_endian(self): - with self.assertRaises(ValueError): - bytes_to_int(b"\x00", "middle") - - def test_int_to_bytes_wrong_endian(self): - with self.assertRaises(ValueError): - int_to_bytes(0, byteorder="middle") - - -@pytest.mark.skipif(GMPY == False, reason="requires gmpy or gmpy2") -def test_int_to_bytes_with_gmpy(): - assert int_to_bytes(mpz(1)) == b"\x01" - - -I2B_VECTORS = [ - (0, None, "big", b""), - (0, 1, "big", b"\x00"), - (1, None, "big", b"\x01"), - (0x0100, None, "little", b"\x00\x01"), - (0x0100, 4, "little", b"\x00\x01\x00\x00"), - (1, 4, "big", b"\x00\x00\x00\x01"), -] - - -@pytest.mark.parametrize("int_in,length,endian,bytes_out", I2B_VECTORS) -def test_int_to_bytes(int_in, length, endian, bytes_out): - out = int_to_bytes(int_in, length, endian) - assert out == bytes_out - - -SHAKE_256_VECTORS = [ - ( - b"Message.", - 32, - b"\x78\xa1\x37\xbb\x33\xae\xe2\x72\xb1\x02\x4f\x39\x43\xe5\xcf\x0c" - b"\x4e\x9c\x72\x76\x2e\x34\x4c\xf8\xf9\xc3\x25\x9d\x4f\x91\x2c\x3a", - ), - ( - b"", - 32, - b"\x46\xb9\xdd\x2b\x0b\xa8\x8d\x13\x23\x3b\x3f\xeb\x74\x3e\xeb\x24" - b"\x3f\xcd\x52\xea\x62\xb8\x1b\x82\xb5\x0c\x27\x64\x6e\xd5\x76\x2f", - ), - ( - b"message", - 32, - b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" - b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75", - ), - ( - b"message", - 16, - b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51", - ), - ( - b"message", - 64, - b"\x86\x16\xe1\xe4\xcf\xd8\xb5\xf7\xd9\x2d\x43\xd8\x6e\x1b\x14\x51" - b"\xa2\xa6\x5a\xf8\x64\xfc\xb1\x26\xc2\x66\x0a\xb3\x46\x51\xb1\x75" - b"\x30\xd6\xba\x2a\x46\x65\xf1\x9d\xf0\x62\x25\xb1\x26\xd1\x3e\xed" - 
b"\x91\xd5\x0d\xe7\xb9\xcb\x65\xf3\x3a\x46\xae\xd3\x6c\x7d\xc5\xe8", - ), - ( - b"A" * 1024, - 32, - b"\xa5\xef\x7e\x30\x8b\xe8\x33\x64\xe5\x9c\xf3\xb5\xf3\xba\x20\xa3" - b"\x5a\xe7\x30\xfd\xbc\x33\x11\xbf\x83\x89\x50\x82\xb4\x41\xe9\xb3", - ), -] - - -@pytest.mark.parametrize("msg,olen,ohash", SHAKE_256_VECTORS) -def test_shake_256(msg, olen, ohash): - out = shake_256(msg, olen) - assert out == bytearray(ohash) diff --git a/venv/lib/python3.12/site-packages/ecdsa/util.py b/venv/lib/python3.12/site-packages/ecdsa/util.py deleted file mode 100644 index 1aff5bf5..00000000 --- a/venv/lib/python3.12/site-packages/ecdsa/util.py +++ /dev/null @@ -1,533 +0,0 @@ -""" -This module includes some utility functions. - -The methods most typically used are the sigencode and sigdecode functions -to be used with :func:`~ecdsa.keys.SigningKey.sign` and -:func:`~ecdsa.keys.VerifyingKey.verify` -respectively. See the :func:`sigencode_strings`, :func:`sigdecode_string`, -:func:`sigencode_der`, :func:`sigencode_strings_canonize`, -:func:`sigencode_string_canonize`, :func:`sigencode_der_canonize`, -:func:`sigdecode_strings`, :func:`sigdecode_string`, and -:func:`sigdecode_der` functions. -""" - -from __future__ import division - -import os -import math -import binascii -import sys -from hashlib import sha256 -from six import PY2, int2byte, next -from . import der -from ._compat import normalise_bytes - - -# RFC5480: -# The "unrestricted" algorithm identifier is: -# id-ecPublicKey OBJECT IDENTIFIER ::= { -# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 } - -oid_ecPublicKey = (1, 2, 840, 10045, 2, 1) -encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey) - -# RFC5480: -# The ECDH algorithm uses the following object identifier: -# id-ecDH OBJECT IDENTIFIER ::= { -# iso(1) identified-organization(3) certicom(132) schemes(1) -# ecdh(12) } - -oid_ecDH = (1, 3, 132, 1, 12) - -# RFC5480: -# The ECMQV algorithm uses the following object identifier: -# id-ecMQV OBJECT IDENTIFIER ::= { -# iso(1) identified-organization(3) certicom(132) schemes(1) -# ecmqv(13) } - -oid_ecMQV = (1, 3, 132, 1, 13) - -if sys.version_info >= (3,): # pragma: no branch - - def entropy_to_bits(ent_256): - """Convert a bytestring to string of 0's and 1's""" - return bin(int.from_bytes(ent_256, "big"))[2:].zfill(len(ent_256) * 8) - -else: - - def entropy_to_bits(ent_256): - """Convert a bytestring to string of 0's and 1's""" - return "".join(bin(ord(x))[2:].zfill(8) for x in ent_256) - - -if sys.version_info < (2, 7): # pragma: no branch - # Can't add a method to a built-in type so we are stuck with this - def bit_length(x): - return len(bin(x)) - 2 - -else: - - def bit_length(x): - return x.bit_length() or 1 - - -def orderlen(order): - return (1 + len("%x" % order)) // 2 # bytes - - -def randrange(order, entropy=None): - """Return a random integer k such that 1 <= k < order, uniformly - distributed across that range. Worst case should be a mean of 2 loops at - (2**k)+2. - - Note that this function is not declared to be forwards-compatible: we may - change the behavior in future releases. The entropy= argument (which - should get a callable that behaves like os.urandom) can be used to - achieve stability within a given release (for repeatable unit tests), but - should not be used as a long-term-compatible key generation algorithm. 
- """ - assert order > 1 - if entropy is None: - entropy = os.urandom - upper_2 = bit_length(order - 2) - upper_256 = upper_2 // 8 + 1 - while True: # I don't think this needs a counter with bit-wise randrange - ent_256 = entropy(upper_256) - ent_2 = entropy_to_bits(ent_256) - rand_num = int(ent_2[:upper_2], base=2) + 1 - if 0 < rand_num < order: - return rand_num - - -class PRNG: - # this returns a callable which, when invoked with an integer N, will - # return N pseudorandom bytes. Note: this is a short-term PRNG, meant - # primarily for the needs of randrange_from_seed__trytryagain(), which - # only needs to run it a few times per seed. It does not provide - # protection against state compromise (forward security). - def __init__(self, seed): - self.generator = self.block_generator(seed) - - def __call__(self, numbytes): - a = [next(self.generator) for i in range(numbytes)] - - if PY2: # pragma: no branch - return "".join(a) - else: - return bytes(a) - - def block_generator(self, seed): - counter = 0 - while True: - for byte in sha256( - ("prng-%d-%s" % (counter, seed)).encode() - ).digest(): - yield byte - counter += 1 - - -def randrange_from_seed__overshoot_modulo(seed, order): - # hash the data, then turn the digest into a number in [1,order). - # - # We use David-Sarah Hopwood's suggestion: turn it into a number that's - # sufficiently larger than the group order, then modulo it down to fit. - # This should give adequate (but not perfect) uniformity, and simple - # code. There are other choices: try-try-again is the main one. - base = PRNG(seed)(2 * orderlen(order)) - number = (int(binascii.hexlify(base), 16) % (order - 1)) + 1 - assert 1 <= number < order, (1, number, order) - return number - - -def lsb_of_ones(numbits): - return (1 << numbits) - 1 - - -def bits_and_bytes(order): - bits = int(math.log(order - 1, 2) + 1) - bytes = bits // 8 - extrabits = bits % 8 - return bits, bytes, extrabits - - -# the following randrange_from_seed__METHOD() functions take an -# arbitrarily-sized secret seed and turn it into a number that obeys the same -# range limits as randrange() above. They are meant for deriving consistent -# signing keys from a secret rather than generating them randomly, for -# example a protocol in which three signing keys are derived from a master -# secret. You should use a uniformly-distributed unguessable seed with about -# curve.baselen bytes of entropy. To use one, do this: -# seed = os.urandom(curve.baselen) # or other starting point -# secexp = ecdsa.util.randrange_from_seed__trytryagain(sed, curve.order) -# sk = SigningKey.from_secret_exponent(secexp, curve) - - -def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256): - # hash the seed, then turn the digest into a number in [1,order), but - # don't worry about trying to uniformly fill the range. This will lose, - # on average, four bits of entropy. 
- bits, _bytes, extrabits = bits_and_bytes(order) - if extrabits: - _bytes += 1 - base = hashmod(seed).digest()[:_bytes] - base = "\x00" * (_bytes - len(base)) + base - number = 1 + int(binascii.hexlify(base), 16) - assert 1 <= number < order - return number - - -def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256): - # like randrange_from_seed__truncate_bytes, but only lose an average of - # half a bit - bits = int(math.log(order - 1, 2) + 1) - maxbytes = (bits + 7) // 8 - base = hashmod(seed).digest()[:maxbytes] - base = "\x00" * (maxbytes - len(base)) + base - topbits = 8 * maxbytes - bits - if topbits: - base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:] - number = 1 + int(binascii.hexlify(base), 16) - assert 1 <= number < order - return number - - -def randrange_from_seed__trytryagain(seed, order): - # figure out exactly how many bits we need (rounded up to the nearest - # bit), so we can reduce the chance of looping to less than 0.5 . This is - # specified to feed from a byte-oriented PRNG, and discards the - # high-order bits of the first byte as necessary to get the right number - # of bits. The average number of loops will range from 1.0 (when - # order=2**k-1) to 2.0 (when order=2**k+1). - assert order > 1 - bits, bytes, extrabits = bits_and_bytes(order) - generate = PRNG(seed) - while True: - extrabyte = b"" - if extrabits: - extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits)) - guess = string_to_number(extrabyte + generate(bytes)) + 1 - if 1 <= guess < order: - return guess - - -def number_to_string(num, order): - l = orderlen(order) - fmt_str = "%0" + str(2 * l) + "x" - string = binascii.unhexlify((fmt_str % num).encode()) - assert len(string) == l, (len(string), l) - return string - - -def number_to_string_crop(num, order): - l = orderlen(order) - fmt_str = "%0" + str(2 * l) + "x" - string = binascii.unhexlify((fmt_str % num).encode()) - return string[:l] - - -def string_to_number(string): - return int(binascii.hexlify(string), 16) - - -def string_to_number_fixedlen(string, order): - l = orderlen(order) - assert len(string) == l, (len(string), l) - return int(binascii.hexlify(string), 16) - - -def sigencode_strings(r, s, order): - """ - Encode the signature to a pair of strings in a tuple - - Encodes signature into raw encoding (:term:`raw encoding`) with the - ``r`` and ``s`` parts of the signature encoded separately. - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. - - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: raw encoding of ECDSA signature - :rtype: tuple(bytes, bytes) - """ - r_str = number_to_string(r, order) - s_str = number_to_string(s, order) - return (r_str, s_str) - - -def sigencode_string(r, s, order): - """ - Encode the signature to raw format (:term:`raw encoding`) - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. 
- - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: raw encoding of ECDSA signature - :rtype: bytes - """ - # for any given curve, the size of the signature numbers is - # fixed, so just use simple concatenation - r_str, s_str = sigencode_strings(r, s, order) - return r_str + s_str - - -def sigencode_der(r, s, order): - """ - Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. - - Encodes the signature to the following :term:`ASN.1` structure:: - - Ecdsa-Sig-Value ::= SEQUENCE { - r INTEGER, - s INTEGER - } - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. - - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: DER encoding of ECDSA signature - :rtype: bytes - """ - return der.encode_sequence(der.encode_integer(r), der.encode_integer(s)) - - -def _canonize(s, order): - """ - Internal function for ensuring that the ``s`` value of a signature is in - the "canonical" format. - - :param int s: the second parameter of ECDSA signature - :param int order: the order of the curve over which the signature was - computed - - :return: canonical value of s - :rtype: int - """ - if s > order // 2: - s = order - s - return s - - -def sigencode_strings_canonize(r, s, order): - """ - Encode the signature to a pair of strings in a tuple - - Encodes signature into raw encoding (:term:`raw encoding`) with the - ``r`` and ``s`` parts of the signature encoded separately. - - Makes sure that the signature is encoded in the canonical format, where - the ``s`` parameter is always smaller than ``order / 2``. - Most commonly used in bitcoin. - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. - - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: raw encoding of ECDSA signature - :rtype: tuple(bytes, bytes) - """ - s = _canonize(s, order) - return sigencode_strings(r, s, order) - - -def sigencode_string_canonize(r, s, order): - """ - Encode the signature to raw format (:term:`raw encoding`) - - Makes sure that the signature is encoded in the canonical format, where - the ``s`` parameter is always smaller than ``order / 2``. - Most commonly used in bitcoin. - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. - - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: raw encoding of ECDSA signature - :rtype: bytes - """ - s = _canonize(s, order) - return sigencode_string(r, s, order) - - -def sigencode_der_canonize(r, s, order): - """ - Encode the signature into the ECDSA-Sig-Value structure using :term:`DER`. - - Makes sure that the signature is encoded in the canonical format, where - the ``s`` parameter is always smaller than ``order / 2``. - Most commonly used in bitcoin. 
- - Encodes the signature to the following :term:`ASN.1` structure:: - - Ecdsa-Sig-Value ::= SEQUENCE { - r INTEGER, - s INTEGER - } - - It's expected that this function will be used as a ``sigencode=`` parameter - in :func:`ecdsa.keys.SigningKey.sign` method. - - :param int r: first parameter of the signature - :param int s: second parameter of the signature - :param int order: the order of the curve over which the signature was - computed - - :return: DER encoding of ECDSA signature - :rtype: bytes - """ - s = _canonize(s, order) - return sigencode_der(r, s, order) - - -class MalformedSignature(Exception): - """ - Raised by decoding functions when the signature is malformed. - - Malformed in this context means that the relevant strings or integers - do not match what a signature over the provided curve would create. Either - because the byte strings have incorrect lengths or because the encoded - values are too large. - """ - - pass - - -def sigdecode_string(signature, order): - """ - Decoder for :term:`raw encoding` of ECDSA signatures. - - raw encoding is a simple concatenation of the two integers that comprise - the signature, with each encoded using the same number of bytes depending - on curve size/order. - - It's expected that this function will be used as the ``sigdecode=`` - parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. - - :param signature: encoded signature - :type signature: bytes like object - :param order: order of the curve over which the signature was computed - :type order: int - - :raises MalformedSignature: when the encoding of the signature is invalid - - :return: tuple with decoded ``r`` and ``s`` values of signature - :rtype: tuple of ints - """ - signature = normalise_bytes(signature) - l = orderlen(order) - if not len(signature) == 2 * l: - raise MalformedSignature( - "Invalid length of signature, expected {0} bytes long, " - "provided string is {1} bytes long".format(2 * l, len(signature)) - ) - r = string_to_number_fixedlen(signature[:l], order) - s = string_to_number_fixedlen(signature[l:], order) - return r, s - - -def sigdecode_strings(rs_strings, order): - """ - Decode the signature from two strings. - - First string needs to be a big endian encoding of ``r``, second needs to - be a big endian encoding of the ``s`` parameter of an ECDSA signature. - - It's expected that this function will be used as the ``sigdecode=`` - parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method. 
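[Editor's aside: the sigencode/sigdecode helpers documented in this module are used in matched pairs, one at signing time and one at verification time. A minimal usage sketch against the public `ecdsa` API follows; the curve and message are arbitrary illustrative choices.]

```python
import hashlib

from ecdsa import NIST256p, SigningKey
from ecdsa.util import sigencode_der_canonize, sigdecode_der

sk = SigningKey.generate(curve=NIST256p)
vk = sk.get_verifying_key()

# The encoder runs at signing time; the canonize variant additionally
# keeps the s value below order/2, as expected by e.g. bitcoin.
sig = sk.sign(
    b"message", hashfunc=hashlib.sha256, sigencode=sigencode_der_canonize
)

# The matching decoder runs at verification time.
assert vk.verify(
    sig, b"message", hashfunc=hashlib.sha256, sigdecode=sigdecode_der
)
```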
-
- :param list rs_strings: list of two bytes-like objects, each encoding one
- parameter of signature
- :param int order: order of the curve over which the signature was computed
-
- :raises MalformedSignature: when the encoding of the signature is invalid
-
- :return: tuple with decoded ``r`` and ``s`` values of signature
- :rtype: tuple of ints
- """
- if not len(rs_strings) == 2:
- raise MalformedSignature(
- "Invalid number of strings provided: {0}, expected 2".format(
- len(rs_strings)
- )
- )
- (r_str, s_str) = rs_strings
- r_str = normalise_bytes(r_str)
- s_str = normalise_bytes(s_str)
- l = orderlen(order)
- if not len(r_str) == l:
- raise MalformedSignature(
- "Invalid length of first string ('r' parameter), "
- "expected {0} bytes long, provided string is {1} "
- "bytes long".format(l, len(r_str))
- )
- if not len(s_str) == l:
- raise MalformedSignature(
- "Invalid length of second string ('s' parameter), "
- "expected {0} bytes long, provided string is {1} "
- "bytes long".format(l, len(s_str))
- )
- r = string_to_number_fixedlen(r_str, order)
- s = string_to_number_fixedlen(s_str, order)
- return r, s
-
-
-def sigdecode_der(sig_der, order):
- """
- Decoder for DER format of ECDSA signatures.
-
- DER format of signature is one that uses the :term:`ASN.1` :term:`DER`
- rules to encode it as a sequence of two integers::
-
- Ecdsa-Sig-Value ::= SEQUENCE {
- r INTEGER,
- s INTEGER
- }
-
- It's expected that this function will be used as the ``sigdecode=``
- parameter to the :func:`ecdsa.keys.VerifyingKey.verify` method.
-
- :param sig_der: encoded signature
- :type sig_der: bytes like object
- :param order: order of the curve over which the signature was computed
- :type order: int
-
- :raises UnexpectedDER: when the encoding of signature is invalid
-
- :return: tuple with decoded ``r`` and ``s`` values of signature
- :rtype: tuple of ints
- """
- sig_der = normalise_bytes(sig_der)
- # return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
- rs_strings, empty = der.remove_sequence(sig_der)
- if empty != b"":
- raise der.UnexpectedDER(
- "trailing junk after DER sig: %s" % binascii.hexlify(empty)
- )
- r, rest = der.remove_integer(rs_strings)
- s, empty = der.remove_integer(rest)
- if empty != b"":
- raise der.UnexpectedDER(
- "trailing junk after DER numbers: %s" % binascii.hexlify(empty)
- )
- return r, s
diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e3..00000000
--- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/LICENSE b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/LICENSE
deleted file mode 100644
index 122e7a71..00000000
--- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-This is free and unencumbered software released into the public
-domain.
-
-Anyone is free to copy, modify, publish, use, compile, sell, or
-distribute this software, either in source code form or as a
-compiled binary, for any purpose, commercial or non-commercial,
-and by any means.
-
-In jurisdictions that recognize copyright laws, the author or
-authors of this software dedicate any and all copyright
-interest in the software to the public domain.
We make this
-dedication for the benefit of the public at large and to the
-detriment of our heirs and successors. We intend this
-dedication to be an overt act of relinquishment in perpetuity
-of all present and future rights to this software under
-copyright law.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR
-ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
-CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-For more information, please refer to <https://unlicense.org>
diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/METADATA
deleted file mode 100644
index 7f7350ec..00000000
--- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/METADATA
+++ /dev/null
@@ -1,465 +0,0 @@
-Metadata-Version: 2.1
-Name: email_validator
-Version: 2.2.0
-Summary: A robust email address syntax and deliverability validation library.
-Home-page: https://github.com/JoshData/python-email-validator
-Author: Joshua Tauberer
-Author-email: jt@occams.info
-License: Unlicense
-Keywords: email address validator
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: The Unlicense (Unlicense)
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: >=3.8
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: dnspython >=2.0.0
-Requires-Dist: idna >=2.0.0
-
-email-validator: Validate Email Addresses
-=========================================
-
-A robust email address syntax and deliverability validation library for
-Python 3.8+ by [Joshua Tauberer](https://joshdata.me).
-
-This library validates that a string is of the form `name@example.com`
-and optionally checks that the domain name is set up to receive email.
-This is the sort of validation you would want when you are identifying
-users by their email address like on a registration form.
-
-Key features:
-
-* Checks that an email address has the correct syntax --- great for
- email-based registration/login forms or validating data.
-* Gives friendly English error messages when validation fails that you
- can display to end-users.
-* Checks deliverability (optional): Does the domain name resolve?
- (You can override the default DNS resolver to add query caching.)
-* Supports internationalized domain names (like `@ツ.life`),
- internationalized local parts (like `ツ@example.com`),
- and optionally parses display names (e.g. `"My Name" <name@example.com>`).
-* Rejects addresses with invalid or unsafe Unicode characters,
- obsolete email address syntax that you'd find unexpected,
- special use domain names like `@localhost`,
- and domains without a dot by default.
- This is an opinionated library!
-* Normalizes email addresses (important for internationalized
- and quoted-string addresses! see below).
-* Python type annotations are used.
-
-This is an opinionated library. You should definitely also consider using
-the less-opinionated [pyIsEmail](https://github.com/michaelherold/pyIsEmail)
-if it works better for you.
-
-[![Build Status](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml/badge.svg)](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml)
-
-View the [CHANGELOG / Release Notes](CHANGELOG.md) for the version history of changes in the library. Occasionally this README is ahead of the latest published package --- see the CHANGELOG for details.
-
----
-
-Installation
-------------
-
-This package [is on PyPI](https://pypi.org/project/email-validator/), so:
-
-```sh
-pip install email-validator
-```
-
-(You might need to use `pip3` depending on your local environment.)
-
-Quick Start
------------
-
-If you're validating a user's email address before creating a user
-account in your application, you might do this:
-
-```python
-from email_validator import validate_email, EmailNotValidError
-
-email = "my+address@example.org"
-
-try:
-
- # Check that the email address is valid. Turn on check_deliverability
- # for first-time validations like on account creation pages (but not
- # login pages).
- emailinfo = validate_email(email, check_deliverability=False)
-
- # After this point, use only the normalized form of the email address,
- # especially before going to a database query.
- email = emailinfo.normalized
-
-except EmailNotValidError as e:
-
- # The exception message is a human-readable explanation of why it's
- # not a valid (or deliverable) email address.
- print(str(e))
-```
-
-This validates the address and gives you its normalized form. You should
-**put the normalized form in your database** and always normalize before
-checking if an address is in your database. When using this in a login form,
-set `check_deliverability` to `False` to avoid unnecessary DNS queries.
-
-Usage
------
-
-### Overview
-
-The module provides a function `validate_email(email_address)` which
-takes an email address and:
-
-- Raises an `EmailNotValidError` with a helpful, human-readable error
- message explaining why the email address is not valid, or
-- Returns an object with a normalized form of the email address (which
- you should use!) and other information about it.
-
-When an email address is not valid, `validate_email` raises either an
-`EmailSyntaxError` if the form of the address is invalid or an
-`EmailUndeliverableError` if the domain name fails DNS checks. Both
-exception classes are subclasses of `EmailNotValidError`, which in turn
-is a subclass of `ValueError`.
-
-But when an email address is valid, an object is returned containing
-a normalized form of the email address (which you should use!) and
-other information.
-
-The validator doesn't, by default, permit obsoleted forms of email addresses
-that no one uses anymore even though they are still valid and deliverable, since
-they will probably give you grief if you're using email for login. (See
-later in the document about how to allow some obsolete forms.)
-
-The validator optionally checks that the domain name in the email address has
-a DNS MX record indicating that it can receive email. (Except a Null MX record.
-If there is no MX record, a fallback A/AAAA-record is permitted, unless
-a reject-all SPF record is present.) DNS is slow and sometimes unavailable or
-unreliable, so consider whether these checks are useful for your use case and
-turn them off if they aren't.
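Whether or not the DNS checks are enabled determines which of the two exception types above you can see. As a minimal sketch of handling them separately (the `check_address` helper is illustrative, not part of the library):

```python
from typing import Optional

from email_validator import (
    EmailSyntaxError,
    EmailUndeliverableError,
    validate_email,
)


def check_address(email: str) -> Optional[str]:
    # Hypothetical helper: return the normalized address, or None on failure.
    try:
        return validate_email(email, check_deliverability=True).normalized
    except EmailSyntaxError as e:
        # The form of the address itself is invalid.
        print(f"Bad syntax: {e}")
    except EmailUndeliverableError as e:
        # The syntax was fine, but the domain failed the DNS checks.
        print(f"Not deliverable: {e}")
    return None
```

Catching `EmailNotValidError` alone would cover both cases; splitting them is useful when you want to treat the DNS-based judgment as advisory rather than fatal.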
-There is nothing to be gained by trying to actually contact an SMTP server, so
-that's not done here. For privacy, security, and practicality reasons, servers
-are good at not giving away whether an address is
-deliverable or not: email addresses that appear to accept mail at first
-can bounce mail after a delay, and bounced mail may indicate a temporary
-failure of a good email address (sometimes an intentional failure, like
-greylisting).
-
-### Options
-
-The `validate_email` function also accepts the following keyword arguments
-(defaults are as shown below):
-
-`check_deliverability=True`: If true, DNS queries are made to check that the domain name in the email address (the part after the @-sign) can receive mail, as described above. Set to `False` to skip this DNS-based check. It is recommended to pass `False` when performing validation for login pages (but not account creation pages) since re-validation of a previously validated domain in your database by querying DNS at every login is probably undesirable. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn this off for all calls by default.
-
-`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with a [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache.
-
-`test_environment=False`: If `True`, DNS-based deliverability checks are disabled and `test` and `**.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default.
-
-`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would
- require the
- [SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default.
-
-`allow_quoted_local=False`: Set to `True` to allow obscure and potentially problematic email addresses in which the part of the address before the @-sign contains spaces, @-signs, or other surprising characters when the local part is surrounded in quotes (so-called quoted-string local parts). In the object returned by `validate_email`, the normalized local part removes any unnecessary backslash-escaping and even removes the surrounding quotes if the address would be valid without them. You can also set `email_validator.ALLOW_QUOTED_LOCAL` to `True` to turn this on for all calls by default.
-
-`allow_domain_literal=False`: Set to `True` to allow bracketed IPv4 and "IPv6:"-prefixed IPv6 addresses in the domain part of the email address. No deliverability checks are performed for these addresses. In the object returned by `validate_email`, the normalized domain will use the condensed IPv6 format, if applicable. The object's `domain_address` attribute will hold the parsed `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object if applicable. You can also set `email_validator.ALLOW_DOMAIN_LITERAL` to `True` to turn this on for all calls by default.
-
-`allow_display_name=False`: Set to `True` to allow a display name and bracketed address in the input string, like `My Name <name@example.com>`.
It's implemented in the spirit but not the letter of RFC 5322 3.4, so it may be stricter or more relaxed than what you want. The display name, if present, is provided in the returned object's `display_name` field after being unquoted and unescaped. You can also set `email_validator.ALLOW_DISPLAY_NAME` to `True` to turn this on for all calls by default. - -`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e. - `@example.com`), e.g. for validating Postfix aliases. - - -### DNS timeout and cache - -When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. The `caching_resolver` function returns one easily for you: - -```python -from email_validator import validate_email, caching_resolver - -resolver = caching_resolver(timeout=10) - -while True: - validate_email(email, dns_resolver=resolver) -``` - -### Test addresses - -This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailSyntaxError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost` (although they might be able to still do so via a malicious MX record). However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this: - -1. Add `test_environment=True` to the call to `validate_email` (see above). -2. Set `email_validator.TEST_ENVIRONMENT` to `True` globally. -3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`, e.g.: - -```python -import email_validator -email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test") -``` - -It is tempting to use `@example.com/net/org` in tests. They are *not* in this library's `SPECIAL_USE_DOMAIN_NAMES` list so you can, but shouldn't, use them. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will nevertheless reject these domain names if DNS-based deliverability checks are not disabled because these domains do not resolve to domains that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead. - -Internationalized email addresses ---------------------------------- - -The email protocol SMTP and the domain name system DNS have historically -only allowed English (ASCII) characters in email addresses and domain names, -respectively. Each has adapted to internationalization in a separate -way, creating two separate aspects to email address internationalization. - -(If your mail submission library doesn't support Unicode at all, then -immediately prior to mail submission you must replace the email address with -its ASCII-ized form. This library gives you back the ASCII-ized form in the -`ascii_email` field in the returned object.) - -### Internationalized domain names (IDN) - -The first is [internationalized domain names (RFC -5891)](https://tools.ietf.org/html/rfc5891), a.k.a IDNA 2008. The DNS -system has not been updated with Unicode support. 
Instead, internationalized
-domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)"
-form starting with `xn--`. When an email address has non-ASCII
-characters in its domain part, the domain part is replaced with its IDNA
-ASCII equivalent form in the process of mail transmission. Your mail
-submission library probably does this for you transparently. ([Compliance
-around the web is not very good though](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html).) This library conforms to IDNA 2008
-using the [idna](https://github.com/kjd/idna) module by Kim Davies.
-
-### Internationalized local parts
-
-The second sort of internationalization is internationalization in the
-*local* part of the address (before the @-sign). In non-internationalized
-email addresses, only English letters, numbers, and some punctuation
-(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address
-local parts, a wider range of Unicode characters are allowed.
-
-Email addresses with these non-ASCII characters require that your mail
-submission library and all the mail servers along the route to the destination,
-including your own outbound mail server, all support the
-[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension.
-Support for SMTPUTF8 varies. If you know ahead of time that SMTPUTF8 is not
-supported by your mail submission stack, then you must filter out addresses that
-require SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above).
-This will cause the validation function to raise an `EmailSyntaxError` if
-delivery would require SMTPUTF8. If you do not set `allow_smtputf8=False`,
-you can also check the value of the `smtputf8` field in the returned object.
-
-### Unsafe Unicode characters are rejected
-
-A surprisingly large number of Unicode characters are not safe to display,
-especially when the email address is concatenated with other text, so this
-library tries to protect you by not permitting reserved, non-, private use,
-formatting (which can be used to alter the display order of characters),
-whitespace, and control characters, and combining characters
-as the first character of the local part and the domain name (so that they
-cannot combine with something outside of the email address string or with
-the @-sign). See https://qntm.org/safe and https://trojansource.codes/
-for relevant prior work. (Other than whitespace, these are checks that
-you should be applying to nearly all user inputs in a security-sensitive
-context.) This does not guard against the well known problem that many
-Unicode characters look alike, which can be used to fool humans reading
-displayed text.
-
-
-Normalization
--------------
-
-### Unicode Normalization
-
-The use of Unicode in email addresses introduced a normalization
-problem. Different Unicode strings can look identical and have the same
-semantic meaning to the user. The `normalized` field returned on successful
-validation provides the correctly normalized form of the given email
-address.
-
-For example, the CJK fullwidth Latin letters are considered semantically
-equivalent in domain names to their ASCII counterparts.
This library
-normalizes them to their ASCII counterparts (as required by IDNA):
-
-```python
-emailinfo = validate_email("me@Ｄｏｍａｉｎ.com")
-print(emailinfo.normalized)
-print(emailinfo.ascii_email)
-# prints "me@domain.com" twice
-```
-
-Because an end-user might type their email address in different (but
-equivalent) un-normalized forms at different times, you ought to
-replace what they enter with the normalized form immediately prior to
-going into your database (during account creation), querying your database
-(during login), or sending outbound mail.
-
-The normalizations include lowercasing the domain part of the email
-address (domain names are case-insensitive), [Unicode "NFC"
-normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the
-whole address (which turns characters plus [combining
-characters](https://en.wikipedia.org/wiki/Combining_character) into
-precomposed characters where possible), replacement of [fullwidth and
-halfwidth
-characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms)
-in the domain part, possibly other
-[UTS46](http://unicode.org/reports/tr46) mappings on the domain part,
-and conversion from Punycode to Unicode characters.
-
-Normalization may change the characters in the email address and the
-length of the email address, such that a string might be a valid address
-before normalization but invalid after, or vice versa. This library only
-permits addresses that are valid both before and after normalization.
-
-(See [RFC 6532 (internationalized email) section
-3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895
-(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).)
-
-### Other Normalization
-
-Normalization is also applied to quoted-string local parts and domain
-literal IPv6 addresses if you have allowed them by the `allow_quoted_local`
-and `allow_domain_literal` options. In quoted-string local parts, unnecessary
-backslash escaping is removed and even the surrounding quotes are removed if
-they are unnecessary. For IPv6 domain literals, the IPv6 address is
-normalized to condensed form. [RFC 2142](https://datatracker.ietf.org/doc/html/rfc2142)
-also requires lowercase normalization for some specific mailbox names like `postmaster@`.
-
-
-Examples
---------
-
-For the email address `test@joshdata.me`, the returned object is:
-
-```python
-ValidatedEmail(
- normalized='test@joshdata.me',
- local_part='test',
- domain='joshdata.me',
- ascii_email='test@joshdata.me',
- ascii_local_part='test',
- ascii_domain='joshdata.me',
- smtputf8=False)
-```
-
-For the fictitious but valid address `example@ツ.ⓁⒾⒻⒺ`, which has an
-internationalized domain but ASCII local part, the returned object is:
-
-```python
-ValidatedEmail(
- normalized='example@ツ.life',
- local_part='example',
- domain='ツ.life',
- ascii_email='example@xn--bdk.life',
- ascii_local_part='example',
- ascii_domain='xn--bdk.life',
- smtputf8=False)
-```
-
-Note that `normalized` and other fields provide a normalized form of the
-email address, domain name, and (in other cases) local part (see earlier
-discussion of normalization), which you should use in your database.
-
-Calling `validate_email` with the ASCII form of the above email address,
-`example@xn--bdk.life`, returns the exact same information (i.e., the
-`normalized` field always will contain Unicode characters, not Punycode).
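A short sketch of that equivalence (deliverability checks are turned off so no DNS queries are made; the expected values are taken from the example above):

```python
from email_validator import validate_email

# The Unicode and IDNA/Punycode spellings of the same mailbox normalize
# to the same string, so lookups keyed on `normalized` match either form.
u = validate_email("example@ツ.life", check_deliverability=False)
a = validate_email("example@xn--bdk.life", check_deliverability=False)

assert u.normalized == a.normalized == "example@ツ.life"
assert u.ascii_email == a.ascii_email == "example@xn--bdk.life"
```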
- -For the fictitious address `ツ-test@joshdata.me`, which has an -internationalized local part, the returned object is: - -```python -ValidatedEmail( - normalized='ツ-test@joshdata.me', - local_part='ツ-test', - domain='joshdata.me', - ascii_email=None, - ascii_local_part=None, - ascii_domain='joshdata.me', - smtputf8=True) -``` - -Now `smtputf8` is `True` and `ascii_email` is `None` because the local -part of the address is internationalized. The `local_part` and `normalized` fields -return the normalized form of the address. - -Return value ------------- - -When an email address passes validation, the fields in the returned object -are: - -| Field | Value | -| -----:|-------| -| `normalized` | The normalized form of the email address that you should put in your database. This combines the `local_part` and `domain` fields (see below). | -| `ascii_email` | If set, an ASCII-only form of the normalized email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. | -| `local_part` | The normalized local part of the given email address (before the @-sign). Normalization includes Unicode NFC normalization and removing unnecessary quoted-string quotes and backslashes. If `allow_quoted_local` is True and the surrounding quotes are necessary, the quotes _will_ be present in this field. | -| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. | -| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. | -| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. | -| `domain_address` | If domain literals are allowed and if the email address contains one, an `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object. | -| `display_name` | If no display name was present and angle brackets do not surround the address, this will be `None`; otherwise, it will be set to the display name, or the empty string if there were angle brackets but no display name. If the display name was quoted, it will be unquoted and unescaped. | -| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. | -| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). 
May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | -| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | -| `spf` | Any SPF record found while checking deliverability. Only set if the SPF record is queried. | - -Assumptions ------------ - -By design, this validator does not pass all email addresses that -strictly conform to the standards. Many email address forms are obsolete -or likely to cause trouble: - -* The validator assumes the email address is intended to be - usable on the public Internet. The domain part - of the email address must be a resolvable domain name - (see the deliverability checks described above). - Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) - and their subdomains, as well as - domain names without a `.`, are rejected as a syntax error - (except see the `test_environment` parameter above). -* Obsolete email syntaxes are rejected: - The unusual ["(comment)" syntax](https://github.com/JoshData/python-email-validator/issues/77) - is rejected. Extremely old obsolete syntaxes are - rejected. Quoted-string local parts and domain-literal addresses - are rejected by default, but there are options to allow them (see above). - No one uses these forms anymore, and I can't think of any reason why anyone - using this library would need to accept them. - -Testing -------- - -Tests can be run using - -```sh -pip install -r test_requirements.txt -make test -``` - -Tests run with mocked DNS responses. When adding or changing tests, temporarily turn on the `BUILD_MOCKED_DNS_RESPONSE_DATA` flag in `tests/mocked_dns_responses.py` to re-build the database of mocked responses from live queries. - -For Project Maintainers ------------------------ - -The package is distributed as a universal wheel and as a source package. - -To release: - -* Update CHANGELOG.md. -* Update the version number in `email_validator/version.py`. -* Make & push a commit with the new version number and make sure tests pass. -* Make & push a tag (see command below). -* Make a release at https://github.com/JoshData/python-email-validator/releases/new. -* Publish a source and wheel distribution to pypi (see command below). - -```sh -git tag v$(cat email_validator/version.py | sed "s/.* = //" | sed 's/"//g') -git push --tags -./release_to_pypi.sh -``` - -License -------- - -This project is free of any copyright restrictions per the [Unlicense](https://unlicense.org/). (Prior to Feb. 4, 2024, the project was made available under the terms of the [CC0 1.0 Universal public domain dedication](http://creativecommons.org/publicdomain/zero/1.0/).) See [LICENSE](LICENSE) and [CONTRIBUTING.md](CONTRIBUTING.md). 
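Pulling the README's storage advice together, here is a minimal sketch of the recommended pattern (the helper names are illustrative, not part of the library): full validation with DNS checks at account creation, syntax-only normalization at login, and only the normalized form ever touching the database.

```python
from email_validator import validate_email


def normalize_for_storage(raw: str) -> str:
    # Account creation: run the full checks, including DNS-based
    # deliverability, and store only the normalized form.
    return validate_email(raw, check_deliverability=True).normalized


def normalize_for_lookup(raw: str) -> str:
    # Login: skip the DNS queries; the address was already vetted
    # when the account was created.
    return validate_email(raw, check_deliverability=False).normalized
```

Both helpers raise `EmailNotValidError` subclasses on failure, so the caller decides how to report the problem to the user.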
diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/RECORD deleted file mode 100644 index 8310e327..00000000 --- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,25 +0,0 @@ -../../../bin/email_validator,sha256=7qHeq2LRM7XBWIyghShhUrENeKWOuYQ317RoIJTVvbc,251 -email_validator-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -email_validator-2.2.0.dist-info/LICENSE,sha256=ZyF5dS4QkTSj-yvdB4Cyn9t6A5dPD1hqE66tUSlWLUw,1212 -email_validator-2.2.0.dist-info/METADATA,sha256=vELkkg-p-qMuqNFX6uzDmMaruT7Pe5PDAQexHLAB4XM,25741 -email_validator-2.2.0.dist-info/RECORD,, -email_validator-2.2.0.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91 -email_validator-2.2.0.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66 -email_validator-2.2.0.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 -email_validator/__init__.py,sha256=g-TFM6vzpEt4dMG93giGlS343yXXXIy7EOLNFEn6DfA,4360 -email_validator/__main__.py,sha256=TIvjaG_OSFRciH0J2pnEJEdX3uJy3ZgocmasEqh9EEI,2243 -email_validator/__pycache__/__init__.cpython-312.pyc,, -email_validator/__pycache__/__main__.cpython-312.pyc,, -email_validator/__pycache__/deliverability.cpython-312.pyc,, -email_validator/__pycache__/exceptions_types.cpython-312.pyc,, -email_validator/__pycache__/rfc_constants.cpython-312.pyc,, -email_validator/__pycache__/syntax.cpython-312.pyc,, -email_validator/__pycache__/validate_email.cpython-312.pyc,, -email_validator/__pycache__/version.cpython-312.pyc,, -email_validator/deliverability.py,sha256=e6eODNSaLMiM29EZ3bWYDFkQDlMIdicBaykjYQJwYig,7222 -email_validator/exceptions_types.py,sha256=yLxXqwtl5dXa-938K7skLP1pMFgi0oovzCs74mX7TGs,6024 -email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -email_validator/rfc_constants.py,sha256=KVUshwIu699cle3UzDU2_fFBSQOO7p91Z_hrlNANtGM,2767 -email_validator/syntax.py,sha256=Mo5KLgEsbQcvNzs8zO5QbhzUK4MAjL9yJFDpwsF12lY,36005 -email_validator/validate_email.py,sha256=YUXY5Sv_mQ7Vuu_AmGdISza8v-VaABnNMLrlWv8EIl4,8401 -email_validator/version.py,sha256=DKk-1b-rZsJFxFi1JoJ7TmEvIEQ0rf-C9HAZWwvjuM0,22 diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/WHEEL deleted file mode 100644 index 9086d271..00000000 --- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (70.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/entry_points.txt deleted file mode 100644 index 03c6e230..00000000 --- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -email_validator = email_validator.__main__:main diff --git a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/top_level.txt deleted file mode 100644 index 798fd5ef..00000000 --- a/venv/lib/python3.12/site-packages/email_validator-2.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -email_validator diff --git a/venv/lib/python3.12/site-packages/email_validator/__init__.py 
b/venv/lib/python3.12/site-packages/email_validator/__init__.py
deleted file mode 100644
index 626aa002..00000000
--- a/venv/lib/python3.12/site-packages/email_validator/__init__.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from typing import TYPE_CHECKING
-
-# Export the main method, helper methods, and the public data types.
-from .exceptions_types import ValidatedEmail, EmailNotValidError, \
- EmailSyntaxError, EmailUndeliverableError
-from .validate_email import validate_email
-from .version import __version__
-
-__all__ = ["validate_email",
- "ValidatedEmail", "EmailNotValidError",
- "EmailSyntaxError", "EmailUndeliverableError",
- "caching_resolver", "__version__"]
-
-if TYPE_CHECKING:
- from .deliverability import caching_resolver
-else:
- def caching_resolver(*args, **kwargs):
- # Lazy load `deliverability` as it is slow to import (due to dns.resolver)
- from .deliverability import caching_resolver
-
- return caching_resolver(*args, **kwargs)
-
-
-# These global attributes are a part of the library's API and can be
-# changed by library users.
-
-# Default values for keyword arguments.
-
-ALLOW_SMTPUTF8 = True
-ALLOW_QUOTED_LOCAL = False
-ALLOW_DOMAIN_LITERAL = False
-ALLOW_DISPLAY_NAME = False
-GLOBALLY_DELIVERABLE = True
-CHECK_DELIVERABILITY = True
-TEST_ENVIRONMENT = False
-DEFAULT_TIMEOUT = 15 # secs
-
-# IANA Special Use Domain Names
-# Last Updated 2021-09-21
-# https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt
-#
-# The domain names without dots would be caught by the check that the domain
-# name in an email address must have a period, but this list will also catch
-# subdomains of these domains, which are also reserved.
-SPECIAL_USE_DOMAIN_NAMES = [
- # The "arpa" entry here is consolidated from a lot of arpa subdomains
- # for private address (i.e. non-routable IP addresses like 172.16.x.x)
- # reverse mapping, plus some other subdomains. Although RFC 6761 says
- # that application software should not treat these domains as special,
- # they are private-use domains and so cannot have globally deliverable
- # email addresses, which is an assumption of this library, and probably
- # all of arpa is similarly special-use, so we reject it all.
- "arpa",
-
- # RFC 6761 says applications "SHOULD NOT" treat the "example" domains
- # as special, i.e. applications should accept these domains.
- #
- # The domain "example" alone fails our syntax validation because it
- # lacks a dot (we assume no one has an email address on a TLD directly).
- # "@example.com/net/org" will currently fail DNS-based deliverability
- # checks because IANA publishes a NULL MX for these domains, and
- # "@mail.example[.com/net/org]" and other subdomains will fail DNS-
- # based deliverability checks because IANA does not publish MX or A
- # DNS records for these subdomains.
- # "example", # i.e. "www.example"
- # "example.com",
- # "example.net",
- # "example.org",
-
- # RFC 6761 says that applications are permitted to treat this domain
- # as special and that DNS should return an immediate negative response,
- # so we also immediately reject this domain, which also follows the
- # purpose of the domain.
- "invalid",
-
- # RFC 6762 says that applications "may" treat ".local" as special and
- # that "name resolution APIs and libraries SHOULD recognize these names
- # as special," and since ".local" has no global definition, we reject
- # it, as we expect email addresses to be globally routable.
-
- "local",
-
- # RFC 6761 says that applications (like this library) are permitted
- # to treat "localhost" as special, and since it cannot have a globally
- # deliverable email address, we reject it.
- "localhost",
-
- # RFC 7686 says "applications that do not implement the Tor protocol
- # SHOULD generate an error upon the use of .onion and SHOULD NOT
- # perform a DNS lookup."
- "onion",
-
- # Although RFC 6761 says that application software should not treat
- # these domains as special, it also warns users that the address may
- # resolve differently in different systems, and therefore it cannot
- # have a globally routable email address, which is an assumption of
- # this library, so we reject "@test" and "@*.test" addresses, unless
- # the test_environment keyword argument is given, to allow their use
- # in application-level test environments. These domains will generally
- # fail deliverability checks because "test" is not an actual TLD.
- "test",
-]
diff --git a/venv/lib/python3.12/site-packages/email_validator/__main__.py b/venv/lib/python3.12/site-packages/email_validator/__main__.py
deleted file mode 100644
index 52791c75..00000000
--- a/venv/lib/python3.12/site-packages/email_validator/__main__.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# A command-line tool for testing.
-#
-# Usage:
-#
-# python -m email_validator test@example.org
-# python -m email_validator < LIST_OF_ADDRESSES.TXT
-#
-# Provide email addresses to validate either as a command-line argument
-# or in STDIN separated by newlines. Validation errors will be printed for
-# invalid email addresses. When passing an email address on the command
-# line, if the email address is valid, information about it will be printed.
-# When using STDIN, no output will be given for valid email addresses.
-#
-# Keyword arguments to validate_email can be set in environment variables
-# of the same name but uppercase (see below).
-
-import json
-import os
-import sys
-from typing import Any, Dict, Optional
-
-from .validate_email import validate_email, _Resolver
-from .deliverability import caching_resolver
-from .exceptions_types import EmailNotValidError
-
-
-def main(dns_resolver: Optional[_Resolver] = None) -> None:
- # The dns_resolver argument is for tests.
-
- # Set options from environment variables.
- options: Dict[str, Any] = {}
- for varname in ('ALLOW_SMTPUTF8', 'ALLOW_QUOTED_LOCAL', 'ALLOW_DOMAIN_LITERAL',
- 'GLOBALLY_DELIVERABLE', 'CHECK_DELIVERABILITY', 'TEST_ENVIRONMENT'):
- if varname in os.environ:
- options[varname.lower()] = bool(os.environ[varname])
- for varname in ('DEFAULT_TIMEOUT',):
- if varname in os.environ:
- options[varname.lower()] = float(os.environ[varname])
-
- if len(sys.argv) == 1:
- # Validate the email addresses passed line-by-line on STDIN.
- dns_resolver = dns_resolver or caching_resolver()
- for line in sys.stdin:
- email = line.strip()
- try:
- validate_email(email, dns_resolver=dns_resolver, **options)
- except EmailNotValidError as e:
- print(f"{email} {e}")
- else:
- # Validate the email address passed on the command line.
- email = sys.argv[1] - try: - result = validate_email(email, dns_resolver=dns_resolver, **options) - print(json.dumps(result.as_dict(), indent=2, sort_keys=True, ensure_ascii=False)) - except EmailNotValidError as e: - print(e) - - -if __name__ == "__main__": - main() diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 4aa01c37..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index c7d98476..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc deleted file mode 100644 index 9c41df37..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/deliverability.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions_types.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions_types.cpython-312.pyc deleted file mode 100644 index 55fd0389..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/exceptions_types.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc deleted file mode 100644 index 01ac1c47..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/rfc_constants.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc deleted file mode 100644 index e77aad80..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/syntax.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc deleted file mode 100644 index 0b060c19..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/validate_email.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc deleted file mode 100644 index 22a6253b..00000000 Binary files a/venv/lib/python3.12/site-packages/email_validator/__pycache__/version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/email_validator/deliverability.py b/venv/lib/python3.12/site-packages/email_validator/deliverability.py deleted file mode 100644 index 90f5f9af..00000000 --- a/venv/lib/python3.12/site-packages/email_validator/deliverability.py +++ /dev/null @@ -1,159 +0,0 @@ -from typing 
import Any, List, Optional, Tuple, TypedDict
-
-import ipaddress
-
-from .exceptions_types import EmailUndeliverableError
-
-import dns.resolver
-import dns.exception
-
-
-def caching_resolver(*, timeout: Optional[int] = None, cache: Any = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> dns.resolver.Resolver:
- if timeout is None:
- from . import DEFAULT_TIMEOUT
- timeout = DEFAULT_TIMEOUT
- resolver = dns_resolver or dns.resolver.Resolver()
- resolver.cache = cache or dns.resolver.LRUCache()
- resolver.lifetime = timeout # timeout, in seconds
- return resolver
-
-
-DeliverabilityInfo = TypedDict("DeliverabilityInfo", {
- "mx": List[Tuple[int, str]],
- "mx_fallback_type": Optional[str],
- "unknown-deliverability": str,
-}, total=False)
-
-
-def validate_email_deliverability(domain: str, domain_i18n: str, timeout: Optional[int] = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> DeliverabilityInfo:
- # Check that the domain resolves to an MX record. If there is no MX record,
- # try an A or AAAA record which is a deprecated fallback for deliverability.
- # Raises an EmailUndeliverableError on failure. On success, returns a dict
- # with deliverability information.
-
- # If no dns.resolver.Resolver was given, get dnspython's default resolver.
- # Override the default resolver's timeout. This may affect other uses of
- # dnspython in this process.
- if dns_resolver is None:
- from . import DEFAULT_TIMEOUT
- if timeout is None:
- timeout = DEFAULT_TIMEOUT
- dns_resolver = dns.resolver.get_default_resolver()
- dns_resolver.lifetime = timeout
- elif timeout is not None:
- raise ValueError("It's not valid to pass both timeout and dns_resolver.")
-
- deliverability_info: DeliverabilityInfo = {}
-
- try:
- try:
- # Try resolving for MX records (RFC 5321 Section 5).
- response = dns_resolver.resolve(domain, "MX")
-
- # For reporting, put them in priority order and remove the trailing dot in the qnames.
- mtas = sorted([(r.preference, str(r.exchange).rstrip('.')) for r in response])
-
- # RFC 7505: Null MX (0, ".") records signify the domain does not accept email.
- # Remove null MX records from the mtas list (but we've stripped trailing dots,
- # so the 'exchange' is just "") so we can check if there are no non-null MX
- # records remaining.
- mtas = [(preference, exchange) for preference, exchange in mtas
- if exchange != ""]
- if len(mtas) == 0: # null MX only, if there were no MX records originally a NoAnswer exception would have occurred
- raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.")
-
- deliverability_info["mx"] = mtas
- deliverability_info["mx_fallback_type"] = None
-
- except dns.resolver.NoAnswer:
- # If there was no MX record, fall back to an A or AAAA record
- # (RFC 5321 Section 5). Check A first since it's more common.
-
- # If the A/AAAA response has no Globally Reachable IP address,
- # treat the response as if it were NoAnswer, i.e., the following
- # address types are not allowed fallbacks: Private-Use, Loopback,
- # Link-Local, and some other obscure ranges. See
- # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml
- # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml
- # (Issue #134.)
- def is_global_addr(address: Any) -> bool: - try: - ipaddr = ipaddress.ip_address(address) - except ValueError: - return False - return ipaddr.is_global - - try: - response = dns_resolver.resolve(domain, "A") - - if not any(is_global_addr(r.address) for r in response): - raise dns.resolver.NoAnswer # fall back to AAAA - - deliverability_info["mx"] = [(0, domain)] - deliverability_info["mx_fallback_type"] = "A" - - except dns.resolver.NoAnswer: - - # If there was no A record, fall back to an AAAA record. - # (It's unclear if SMTP servers actually do this.) - try: - response = dns_resolver.resolve(domain, "AAAA") - - if not any(is_global_addr(r.address) for r in response): - raise dns.resolver.NoAnswer - - deliverability_info["mx"] = [(0, domain)] - deliverability_info["mx_fallback_type"] = "AAAA" - - except dns.resolver.NoAnswer as e: - # If there was no MX, A, or AAAA record, then mail to - # this domain is not deliverable, although the domain - # name has other records (otherwise NXDOMAIN would - # have been raised). - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") from e - - # Check for a SPF (RFC 7208) reject-all record ("v=spf1 -all") which indicates - # no emails are sent from this domain (similar to a Null MX record - # but for sending rather than receiving). In combination with the - # absence of an MX record, this is probably a good sign that the - # domain is not used for email. - try: - response = dns_resolver.resolve(domain, "TXT") - for rec in response: - value = b"".join(rec.strings) - if value.startswith(b"v=spf1 "): - if value == b"v=spf1 -all": - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not send email.") - except dns.resolver.NoAnswer: - # No TXT records means there is no SPF policy, so we cannot take any action. - pass - - except dns.resolver.NXDOMAIN as e: - # The domain name does not exist --- there are no records of any sort - # for the domain name. - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not exist.") from e - - except dns.resolver.NoNameservers: - # All nameservers failed to answer the query. This might be a problem - # with local nameservers, maybe? We'll allow the domain to go through. - return { - "unknown-deliverability": "no_nameservers", - } - - except dns.exception.Timeout: - # A timeout could occur for various reasons, so don't treat it as a failure. - return { - "unknown-deliverability": "timeout", - } - - except EmailUndeliverableError: - # Don't let these get clobbered by the wider except block below. - raise - - except Exception as e: - # Unhandled conditions should not propagate. 
- raise EmailUndeliverableError( - "There was an error while checking if the domain name in the email address is deliverable: " + str(e) - ) from e - - return deliverability_info diff --git a/venv/lib/python3.12/site-packages/email_validator/exceptions_types.py b/venv/lib/python3.12/site-packages/email_validator/exceptions_types.py deleted file mode 100644 index 928a94fc..00000000 --- a/venv/lib/python3.12/site-packages/email_validator/exceptions_types.py +++ /dev/null @@ -1,141 +0,0 @@ -import warnings -from typing import Any, Dict, List, Optional, Tuple, Union - - -class EmailNotValidError(ValueError): - """Parent class of all exceptions raised by this module.""" - pass - - -class EmailSyntaxError(EmailNotValidError): - """Exception raised when an email address fails validation because of its form.""" - pass - - -class EmailUndeliverableError(EmailNotValidError): - """Exception raised when an email address fails validation because its domain name does not appear deliverable.""" - pass - - -class ValidatedEmail: - """The validate_email function returns objects of this type holding the normalized form of the email address - and other information.""" - - """The email address that was passed to validate_email. (If passed as bytes, this will be a string.)""" - original: str - - """The normalized email address, which should always be used in preference to the original address. - The normalized address converts an IDNA ASCII domain name to Unicode, if possible, and performs - Unicode normalization on the local part and on the domain (if originally Unicode). It is the - concatenation of the local_part and domain attributes, separated by an @-sign.""" - normalized: str - - """The local part of the email address after Unicode normalization.""" - local_part: str - - """The domain part of the email address after Unicode normalization or conversion to - Unicode from IDNA ascii.""" - domain: str - - """If the domain part is a domain literal, the IPv4Address or IPv6Address object.""" - domain_address: object - - """If not None, a form of the email address that uses 7-bit ASCII characters only.""" - ascii_email: Optional[str] - - """If not None, the local part of the email address using 7-bit ASCII characters only.""" - ascii_local_part: Optional[str] - - """A form of the domain name that uses 7-bit ASCII characters only.""" - ascii_domain: str - - """If True, the SMTPUTF8 feature of your mail relay will be required to transmit messages - to this address. This flag is True just when ascii_local_part is missing. 
Otherwise it
- is False."""
- smtputf8: bool
-
- """If a deliverability check is performed and if it succeeds, a list of (priority, domain)
- tuples of MX records specified in the DNS for the domain."""
- mx: List[Tuple[int, str]]
-
- """If no MX records are actually specified in DNS and instead are inferred, through an obsolete
- mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`)."""
- mx_fallback_type: Optional[str]
-
- """The display name in the original input text, unquoted and unescaped, or None."""
- display_name: Optional[str]
-
- def __repr__(self) -> str:
- return f"<ValidatedEmail {self.normalized}>"
-
- """For backwards compatibility, support old field names."""
- def __getattr__(self, key: str) -> str:
- if key == "original_email":
- return self.original
- if key == "email":
- return self.normalized
- raise AttributeError(key)
-
- @property
- def email(self) -> str:
- warnings.warn("ValidatedEmail.email is deprecated and will be removed, use ValidatedEmail.normalized instead", DeprecationWarning)
- return self.normalized
-
- """For backwards compatibility, some fields are also exposed through a dict-like interface. Note
- that some of the names changed when they became attributes."""
- def __getitem__(self, key: str) -> Union[Optional[str], bool, List[Tuple[int, str]]]:
- warnings.warn("dict-like access to the return value of validate_email is deprecated and may not be supported in the future.", DeprecationWarning, stacklevel=2)
- if key == "email":
- return self.normalized
- if key == "email_ascii":
- return self.ascii_email
- if key == "local":
- return self.local_part
- if key == "domain":
- return self.ascii_domain
- if key == "domain_i18n":
- return self.domain
- if key == "smtputf8":
- return self.smtputf8
- if key == "mx":
- return self.mx
- if key == "mx-fallback":
- return self.mx_fallback_type
- raise KeyError()
-
- """Tests use this."""
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, ValidatedEmail):
- return False
- return (
- self.normalized == other.normalized
- and self.local_part == other.local_part
- and self.domain == other.domain
- and getattr(self, 'ascii_email', None) == getattr(other, 'ascii_email', None)
- and getattr(self, 'ascii_local_part', None) == getattr(other, 'ascii_local_part', None)
- and getattr(self, 'ascii_domain', None) == getattr(other, 'ascii_domain', None)
- and self.smtputf8 == other.smtputf8
- and repr(sorted(self.mx) if getattr(self, 'mx', None) else None)
- == repr(sorted(other.mx) if getattr(other, 'mx', None) else None)
- and getattr(self, 'mx_fallback_type', None) == getattr(other, 'mx_fallback_type', None)
- and getattr(self, 'display_name', None) == getattr(other, 'display_name', None)
- )
-
- """This helps produce the README."""
- def as_constructor(self) -> str:
- return "ValidatedEmail(" \
- + ",".join(f"\n {key}={repr(getattr(self, key))}"
- for key in ('normalized', 'local_part', 'domain',
- 'ascii_email', 'ascii_local_part', 'ascii_domain',
- 'smtputf8', 'mx', 'mx_fallback_type',
- 'display_name')
- if hasattr(self, key)
- ) \
- + ")"
-
- """Convenience method for accessing ValidatedEmail as a dict"""
- def as_dict(self) -> Dict[str, Any]:
- d = self.__dict__
- if d.get('domain_address'):
- d['domain_address'] = repr(d['domain_address'])
- return d
diff --git a/venv/lib/python3.12/site-packages/email_validator/py.typed b/venv/lib/python3.12/site-packages/email_validator/py.typed
deleted file mode 100644
index e69de29b..00000000
diff --git
a/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py b/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py
deleted file mode 100644
index 39d8e315..00000000
--- a/venv/lib/python3.12/site-packages/email_validator/rfc_constants.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# These constants are defined by the email specifications.
-
-import re
-
-# Based on RFC 5322 3.2.3, these characters are permitted in email
-# addresses (not taking into account internationalization) separated by dots:
-ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~'
-ATEXT_RE = re.compile('[.' + ATEXT + ']') # ATEXT plus dots
-DOT_ATOM_TEXT = re.compile('[' + ATEXT + ']+(?:\\.[' + ATEXT + r']+)*\Z')
-
-# RFC 6531 3.3 extends the allowed characters in internationalized
-# addresses to also include three specific ranges of UTF8 defined in
-# RFC 3629 section 4, which appear to be the Unicode code points from
-# U+0080 to U+10FFFF.
-ATEXT_INTL = ATEXT + "\u0080-\U0010FFFF"
-ATEXT_INTL_DOT_RE = re.compile('[.' + ATEXT_INTL + ']') # ATEXT_INTL plus dots
-DOT_ATOM_TEXT_INTL = re.compile('[' + ATEXT_INTL + ']+(?:\\.[' + ATEXT_INTL + r']+)*\Z')
-
-# The domain part of the email address, after IDNA (ASCII) encoding,
-# must also satisfy the requirements of RFC 952/RFC 1123 2.1 which
-# restrict the allowed characters of hostnames further.
-ATEXT_HOSTNAME_INTL = re.compile(r"[a-zA-Z0-9\-\." + "\u0080-\U0010FFFF" + "]")
-HOSTNAME_LABEL = r'(?:(?:[a-zA-Z0-9][a-zA-Z0-9\-]*)?[a-zA-Z0-9])'
-DOT_ATOM_TEXT_HOSTNAME = re.compile(HOSTNAME_LABEL + r'(?:\.' + HOSTNAME_LABEL + r')*\Z')
-DOMAIN_NAME_REGEX = re.compile(r"[A-Za-z]\Z") # all TLDs currently end with a letter
-
-# Domain literal (RFC 5322 3.4.1; dtext is %d33-90 / %d94-126)
-DOMAIN_LITERAL_CHARS = re.compile(r"[\u0021-\u005A\u005E-\u007E]")
-
-# Quoted-string local part (RFC 5321 4.1.2, internationalized by RFC 6531 3.3)
-# The permitted characters in a quoted string are the characters in the range
-# 32-126, except that quotes and (literal) backslashes can only appear when escaped
-# by a backslash. When internationalized, UTF-8 strings are also permitted except
-# the ASCII characters that are not previously permitted (see above).
-# QUOTED_LOCAL_PART_ADDR = re.compile(r"^\"((?:[\u0020-\u0021\u0023-\u005B\u005D-\u007E]|\\[\u0020-\u007E])*)\"@(.*)")
-QTEXT_INTL = re.compile(r"[\u0020-\u007E\u0080-\U0010FFFF]")
-
-# Length constants
-# RFC 3696 + errata 1003 + errata 1690 (https://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690)
-# explains the maximum length of an email address is 254 octets.
-
-# RFC 2142
-CASE_INSENSITIVE_MAILBOX_NAMES = [
-    'info', 'marketing', 'sales', 'support',  # section 3
-    'abuse', 'noc', 'security',  # section 4
-    'postmaster', 'hostmaster', 'usenet', 'news', 'webmaster', 'www', 'uucp', 'ftp',  # section 5
-]
diff --git a/venv/lib/python3.12/site-packages/email_validator/syntax.py b/venv/lib/python3.12/site-packages/email_validator/syntax.py
deleted file mode 100644
index c6554518..00000000
--- a/venv/lib/python3.12/site-packages/email_validator/syntax.py
+++ /dev/null
@@ -1,761 +0,0 @@
-from .exceptions_types import EmailSyntaxError, ValidatedEmail
-from .rfc_constants import EMAIL_MAX_LENGTH, LOCAL_PART_MAX_LENGTH, DOMAIN_MAX_LENGTH, \
-    DOT_ATOM_TEXT, DOT_ATOM_TEXT_INTL, ATEXT_RE, ATEXT_INTL_DOT_RE, ATEXT_HOSTNAME_INTL, QTEXT_INTL, \
-    DNS_LABEL_LENGTH_LIMIT, DOT_ATOM_TEXT_HOSTNAME, DOMAIN_NAME_REGEX, DOMAIN_LITERAL_CHARS
-
-import re
-import unicodedata
-import idna  # implements IDNA 2008; Python's codec is only IDNA 2003
-import ipaddress
-from typing import Optional, Tuple, TypedDict, Union
-
-
-def split_email(email: str) -> Tuple[Optional[str], str, str, bool]:
-    # Return the display name, unescaped local part, and domain part
-    # of the address, and whether the local part was quoted. If no
-    # display name was present and angle brackets do not surround
-    # the address, display name will be None; otherwise, it will be
-    # set to the display name or the empty string if there were
-    # angle brackets but no display name.
-
-    # Typical email addresses have a single @-sign and no quote
-    # characters, but the awkward "quoted string" local part form
-    # (RFC 5321 4.1.2) allows @-signs and escaped quotes to appear
-    # in the local part if the local part is quoted.
-
-    # A `display name <addr>` format is also present in MIME messages
-    # (RFC 5322 3.4) and this format is also often recognized in
-    # mail UIs. It's not allowed in SMTP commands or in typical web
-    # login forms, but parsing it has been requested, so it's done
-    # here as a convenience. It's implemented in the spirit but not
-    # the letter of RFC 5322 3.4 because MIME messages allow newlines
-    # and comments as a part of the CFWS rule, but this is typically
-    # not allowed in mail UIs (although comment syntax was requested
-    # once too).
-    #
-    # Display names are either basic characters (the same basic characters
-    # permitted in email addresses, but periods are not allowed and spaces
-    # are allowed; see RFC 5322 Appendix A.1.2), or a quoted string with
-    # the same rules as a quoted local part. (Multiple quoted strings might
-    # be allowed? Unclear.) Optional space (RFC 5322 3.4 CFWS) and then the
-    # email address follows in angle brackets.
-    #
-    # An initial quote is ambiguous between starting a display name or
-    # a quoted local part --- fun.
-    #
-    # We assume the input string is already stripped of leading and
-    # trailing CFWS.
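A hypothetical sketch of the display-name parsing described in the comments above (not part of the deleted files; assumes the package is installed, and note the angle-bracket form is rejected unless explicitly enabled):

```python
# Sketch of display-name parsing. The `display name <addr>` form is only
# accepted when allow_display_name=True is passed.
from email_validator import validate_email

info = validate_email('"John Q." <john@example.com>',
                      allow_display_name=True,
                      check_deliverability=False)
print(info.display_name)  # John Q.   (unquoted and unescaped)
print(info.normalized)    # john@example.com
```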
-    def split_string_at_unquoted_special(text: str, specials: Tuple[str, ...]) -> Tuple[str, str]:
-        # Split the string at the first character in specials (an @-sign
-        # or left angle bracket) that does not occur within quotes and
-        # is not followed by a Unicode combining character.
-        # If no special character is found, raise an error.
-        inside_quote = False
-        escaped = False
-        left_part = ""
-        for i, c in enumerate(text):
-            # < plus U+0338 (Combining Long Solidus Overlay) normalizes to
-            # ≮ U+226E (Not Less-Than), and it would be confusing to treat
-            # the < as the start of "<...>" syntax in that case. Likewise,
-            # if anything combines with an @ or ", we should probably not
-            # treat it as a special character.
-            if unicodedata.normalize("NFC", text[i:])[0] != c:
-                left_part += c
-
-            elif inside_quote:
-                left_part += c
-                if c == '\\' and not escaped:
-                    escaped = True
-                elif c == '"' and not escaped:
-                    # The only way to exit the quote is an unescaped quote.
-                    inside_quote = False
-                    escaped = False
-                else:
-                    escaped = False
-            elif c == '"':
-                left_part += c
-                inside_quote = True
-            elif c in specials:
-                # When unquoted, stop before a special character.
-                break
-            else:
-                left_part += c
-
-        if len(left_part) == len(text):
-            raise EmailSyntaxError("An email address must have an @-sign.")
-
-        # The right part is whatever is left.
-        right_part = text[len(left_part):]
-
-        return left_part, right_part
-
-    def unquote_quoted_string(text: str) -> Tuple[str, bool]:
-        # Remove surrounding quotes and unescape escaped backslashes
-        # and quotes. Escapes are parsed liberally. I think only
-        # backslashes and quotes can be escaped but we'll allow anything
-        # to be.
-        quoted = False
-        escaped = False
-        value = ""
-        for i, c in enumerate(text):
-            if quoted:
-                if escaped:
-                    value += c
-                    escaped = False
-                elif c == '\\':
-                    escaped = True
-                elif c == '"':
-                    if i != len(text) - 1:
-                        raise EmailSyntaxError("Extra character(s) found after close quote: "
-                                               + ", ".join(safe_character_display(c) for c in text[i + 1:]))
-                    break
-                else:
-                    value += c
-            elif i == 0 and c == '"':
-                quoted = True
-            else:
-                value += c
-
-        return value, quoted
-
-    # Split the string at the first unquoted @-sign or left angle bracket.
-    left_part, right_part = split_string_at_unquoted_special(email, ("@", "<"))
-
-    # If the right part starts with an angle bracket,
-    # then the left part is a display name and the rest
-    # of the right part up to the final right angle bracket
-    # is the email address, <addr-spec>.
-    if right_part.startswith("<"):
-        # Remove space between the display name and angle bracket.
-        left_part = left_part.rstrip()
-
-        # Unquote and unescape the display name.
-        display_name, display_name_quoted = unquote_quoted_string(left_part)
-
-        # Check that only basic characters are present in a
-        # non-quoted display name.
-        if not display_name_quoted:
-            bad_chars = {
-                safe_character_display(c)
-                for c in display_name
-                if (not ATEXT_RE.match(c) and c != ' ') or c == '.'
-            }
-            if bad_chars:
-                raise EmailSyntaxError("The display name contains invalid characters when not quoted: " + ", ".join(sorted(bad_chars)) + ".")
-
-        # Check for other unsafe characters.
-        check_unsafe_chars(display_name, allow_space=True)
-
-        # Check that the right part ends with an angle bracket
-        # but allow spaces after it, I guess.
-        if ">" not in right_part:
-            raise EmailSyntaxError("An open angle bracket at the start of the email address has to be followed by a close angle bracket at the end.")
-        right_part = right_part.rstrip(" ")
-        if right_part[-1] != ">":
-            raise EmailSyntaxError("There can't be anything after the email address.")
-
-        # Remove the initial and trailing angle brackets.
-        addr_spec = right_part[1:].rstrip(">")
-
-        # Split the email address at the first unquoted @-sign.
- local_part, domain_part = split_string_at_unquoted_special(addr_spec, ("@",)) - - # Otherwise there is no display name. The left part is the local - # part and the right part is the domain. - else: - display_name = None - local_part, domain_part = left_part, right_part - - if domain_part.startswith("@"): - domain_part = domain_part[1:] - - # Unquote the local part if it is quoted. - local_part, is_quoted_local_part = unquote_quoted_string(local_part) - - return display_name, local_part, domain_part, is_quoted_local_part - - -def get_length_reason(addr: str, limit: int) -> str: - """Helper function to return an error message related to invalid length.""" - diff = len(addr) - limit - suffix = "s" if diff > 1 else "" - return f"({diff} character{suffix} too many)" - - -def safe_character_display(c: str) -> str: - # Return safely displayable characters in quotes. - if c == '\\': - return f"\"{c}\"" # can't use repr because it escapes it - if unicodedata.category(c)[0] in ("L", "N", "P", "S"): - return repr(c) - - # Construct a hex string in case the unicode name doesn't exist. - if ord(c) < 0xFFFF: - h = f"U+{ord(c):04x}".upper() - else: - h = f"U+{ord(c):08x}".upper() - - # Return the character name or, if it has no name, the hex string. - return unicodedata.name(c, h) - - -class LocalPartValidationResult(TypedDict): - local_part: str - ascii_local_part: Optional[str] - smtputf8: bool - - -def validate_email_local_part(local: str, allow_smtputf8: bool = True, allow_empty_local: bool = False, - quoted_local_part: bool = False) -> LocalPartValidationResult: - """Validates the syntax of the local part of an email address.""" - - if len(local) == 0: - if not allow_empty_local: - raise EmailSyntaxError("There must be something before the @-sign.") - - # The caller allows an empty local part. Useful for validating certain - # Postfix aliases. - return { - "local_part": local, - "ascii_local_part": local, - "smtputf8": False, - } - - # Check the length of the local part by counting characters. - # (RFC 5321 4.5.3.1.1) - # We're checking the number of characters here. If the local part - # is ASCII-only, then that's the same as bytes (octets). If it's - # internationalized, then the UTF-8 encoding may be longer, but - # that may not be relevant. We will check the total address length - # instead. - if len(local) > LOCAL_PART_MAX_LENGTH: - reason = get_length_reason(local, limit=LOCAL_PART_MAX_LENGTH) - raise EmailSyntaxError(f"The email address is too long before the @-sign {reason}.") - - # Check the local part against the non-internationalized regular expression. - # Most email addresses match this regex so it's probably fastest to check this first. - # (RFC 5322 3.2.3) - # All local parts matching the dot-atom rule are also valid as a quoted string - # so if it was originally quoted (quoted_local_part is True) and this regex matches, - # it's ok. - # (RFC 5321 4.1.2 / RFC 5322 3.2.4). - if DOT_ATOM_TEXT.match(local): - # It's valid. And since it's just the permitted ASCII characters, - # it's normalized and safe. If the local part was originally quoted, - # the quoting was unnecessary and it'll be returned as normalized to - # non-quoted form. - - # Return the local part and flag that SMTPUTF8 is not needed. - return { - "local_part": local, - "ascii_local_part": local, - "smtputf8": False, - } - - # The local part failed the basic dot-atom check. Try the extended character set - # for internationalized addresses. It's the same pattern but with additional - # characters permitted. 
- # RFC 6531 section 3.3. - valid: Optional[str] = None - requires_smtputf8 = False - if DOT_ATOM_TEXT_INTL.match(local): - # But international characters in the local part may not be permitted. - if not allow_smtputf8: - # Check for invalid characters against the non-internationalized - # permitted character set. - # (RFC 5322 3.2.3) - bad_chars = { - safe_character_display(c) - for c in local - if not ATEXT_RE.match(c) - } - if bad_chars: - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") - - # Although the check above should always find something, fall back to this just in case. - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported.") - - # It's valid. - valid = "dot-atom" - requires_smtputf8 = True - - # There are no syntactic restrictions on quoted local parts, so if - # it was originally quoted, it is probably valid. More characters - # are allowed, like @-signs, spaces, and quotes, and there are no - # restrictions on the placement of dots, as in dot-atom local parts. - elif quoted_local_part: - # Check for invalid characters in a quoted string local part. - # (RFC 5321 4.1.2. RFC 5322 lists additional permitted *obsolete* - # characters which are *not* allowed here. RFC 6531 section 3.3 - # extends the range to UTF8 strings.) - bad_chars = { - safe_character_display(c) - for c in local - if not QTEXT_INTL.match(c) - } - if bad_chars: - raise EmailSyntaxError("The email address contains invalid characters in quotes before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") - - # See if any characters are outside of the ASCII range. - bad_chars = { - safe_character_display(c) - for c in local - if not (32 <= ord(c) <= 126) - } - if bad_chars: - requires_smtputf8 = True - - # International characters in the local part may not be permitted. - if not allow_smtputf8: - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") - - # It's valid. - valid = "quoted" - - # If the local part matches the internationalized dot-atom form or was quoted, - # perform additional checks for Unicode strings. - if valid: - # Check that the local part is a valid, safe, and sensible Unicode string. - # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked - # by DOT_ATOM_TEXT_INTL and QTEXT_INTL. Other characters may be permitted by the - # email specs, but they may not be valid, safe, or sensible Unicode strings. - # See the function for rationale. - check_unsafe_chars(local, allow_space=(valid == "quoted")) - - # Try encoding to UTF-8. Failure is possible with some characters like - # surrogate code points, but those are checked above. Still, we don't - # want to have an unhandled exception later. - try: - local.encode("utf8") - except ValueError as e: - raise EmailSyntaxError("The email address contains an invalid character.") from e - - # If this address passes only by the quoted string form, re-quote it - # and backslash-escape quotes and backslashes (removing any unnecessary - # escapes). Per RFC 5321 4.1.2, "all quoted forms MUST be treated as equivalent, - # and the sending system SHOULD transmit the form that uses the minimum quoting possible." 
-        if valid == "quoted":
-            local = '"' + re.sub(r'(["\\])', r'\\\1', local) + '"'
-
-        return {
-            "local_part": local,
-            "ascii_local_part": local if not requires_smtputf8 else None,
-            "smtputf8": requires_smtputf8,
-        }
-
-    # It's not a valid local part. Let's find out why.
-    # (Since quoted local parts are all valid or handled above, these checks
-    # don't apply in those cases.)
-
-    # Check for invalid characters.
-    # (RFC 5322 3.2.3, plus RFC 6531 3.3)
-    bad_chars = {
-        safe_character_display(c)
-        for c in local
-        if not ATEXT_INTL_DOT_RE.match(c)
-    }
-    if bad_chars:
-        raise EmailSyntaxError("The email address contains invalid characters before the @-sign: " + ", ".join(sorted(bad_chars)) + ".")
-
-    # Check for dot errors imposed by the dot-atom rule.
-    # (RFC 5322 3.2.3)
-    check_dot_atom(local, 'An email address cannot start with a {}.', 'An email address cannot have a {} immediately before the @-sign.', is_hostname=False)
-
-    # All of the reasons should already have been checked, but just in case
-    # we have a fallback message.
-    raise EmailSyntaxError("The email address contains invalid characters before the @-sign.")
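A hypothetical sketch of the SMTPUTF8 behavior implemented above (not part of the deleted files; assumes the package is installed, DNS checks skipped):

```python
# Sketch: an internationalized local part validates only when SMTPUTF8 is
# allowed (the default); otherwise it is rejected with a character list.
from email_validator import validate_email, EmailSyntaxError

info = validate_email("jörg@example.com", check_deliverability=False)
print(info.smtputf8)     # True: the local part requires the SMTPUTF8 extension
print(info.ascii_email)  # None: there is no all-ASCII form of this address

try:
    validate_email("jörg@example.com", allow_smtputf8=False,
                   check_deliverability=False)
except EmailSyntaxError as e:
    print(e)  # Internationalized characters before the @-sign are not supported: 'ö'.
```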
-
-
-def check_unsafe_chars(s: str, allow_space: bool = False) -> None:
-    # Check for unsafe characters or characters that would make the string
-    # invalid or non-sensible Unicode.
-    bad_chars = set()
-    for i, c in enumerate(s):
-        category = unicodedata.category(c)
-        if category[0] in ("L", "N", "P", "S"):
-            # Letters, numbers, punctuation, and symbols are permitted.
-            pass
-        elif category[0] == "M":
-            # Combining character in first position would combine with something
-            # outside of the email address if concatenated, so they are not safe.
-            # We also check if this occurs after the @-sign, which would not be
-            # sensible because it would modify the @-sign.
-            if i == 0:
-                bad_chars.add(c)
-        elif category == "Zs":
-            # Spaces outside of the ASCII range are not specifically disallowed in
-            # internationalized addresses as far as I can tell, but they violate
-            # the spirit of the non-internationalized specification that email
-            # addresses do not contain ASCII spaces when not quoted. Excluding
-            # ASCII spaces when not quoted is handled directly by the atom regex.
-            #
-            # In quoted-string local parts, spaces are explicitly permitted, and
-            # the ASCII space has category Zs, so we must allow it here, and we'll
-            # allow all Unicode spaces to be consistent.
-            if not allow_space:
-                bad_chars.add(c)
-        elif category[0] == "Z":
-            # The two line and paragraph separator characters (in categories Zl and Zp)
-            # are not specifically disallowed in internationalized addresses
-            # as far as I can tell, but they violate the spirit of the non-internationalized
-            # specification that email addresses do not contain line breaks when not quoted.
-            bad_chars.add(c)
-        elif category[0] == "C":
-            # Control, format, surrogate, private use, and unassigned code points (C)
-            # are all unsafe in various ways. Control and format characters can affect
-            # text rendering if the email address is concatenated with other text.
-            # Bidirectional format characters are unsafe, even if used properly, because
-            # they cause an email address to render as a different email address.
-            # Private use characters do not make sense for publicly deliverable
-            # email addresses.
-            bad_chars.add(c)
-        else:
-            # All categories should be handled above, but in case there is something new
-            # to the Unicode specification in the future, reject all other categories.
-            bad_chars.add(c)
-    if bad_chars:
-        raise EmailSyntaxError("The email address contains unsafe characters: "
-                               + ", ".join(safe_character_display(c) for c in sorted(bad_chars)) + ".")
-
-
-def check_dot_atom(label: str, start_descr: str, end_descr: str, is_hostname: bool) -> None:
-    # RFC 5322 3.2.3
-    if label.endswith("."):
-        raise EmailSyntaxError(end_descr.format("period"))
-    if label.startswith("."):
-        raise EmailSyntaxError(start_descr.format("period"))
-    if ".." in label:
-        raise EmailSyntaxError("An email address cannot have two periods in a row.")
-
-    if is_hostname:
-        # RFC 952
-        if label.endswith("-"):
-            raise EmailSyntaxError(end_descr.format("hyphen"))
-        if label.startswith("-"):
-            raise EmailSyntaxError(start_descr.format("hyphen"))
-        if ".-" in label or "-." in label:
-            raise EmailSyntaxError("An email address cannot have a period and a hyphen next to each other.")
-
-
-class DomainNameValidationResult(TypedDict):
-    ascii_domain: str
-    domain: str
-
-
-def validate_email_domain_name(domain: str, test_environment: bool = False, globally_deliverable: bool = True) -> DomainNameValidationResult:
-    """Validates the syntax of the domain part of an email address."""
-
-    # Check for invalid characters.
-    # (RFC 952 plus RFC 6531 section 3.3 for internationalized addresses)
-    bad_chars = {
-        safe_character_display(c)
-        for c in domain
-        if not ATEXT_HOSTNAME_INTL.match(c)
-    }
-    if bad_chars:
-        raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".")
-
-    # Check for unsafe characters.
-    # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked
-    # by DOT_ATOM_TEXT_INTL. Other characters may be permitted by the email specs, but
-    # they may not be valid, safe, or sensible Unicode strings.
-    check_unsafe_chars(domain)
-
-    # Perform UTS-46 normalization, which includes casefolding, NFC normalization,
-    # and converting all label separators (the period/full stop, fullwidth full stop,
-    # ideographic full stop, and halfwidth ideographic full stop) to regular dots.
-    # It will also raise an exception if there is an invalid character in the input,
-    # such as "⒈" which is invalid because it would expand to include a dot and
-    # U+1FEF which normalizes to a backtick, which is not an allowed hostname character.
-    # Since several characters *are* normalized to a dot, this has to come before
-    # checks related to dots, like check_dot_atom which comes next.
-    original_domain = domain
-    try:
-        domain = idna.uts46_remap(domain, std3_rules=False, transitional=False)
-    except idna.IDNAError as e:
-        raise EmailSyntaxError(f"The part after the @-sign contains invalid characters ({e}).") from e
-
-    # Check for invalid characters after Unicode normalization which are not caught
-    # by uts46_remap (see tests for examples).
-    bad_chars = {
-        safe_character_display(c)
-        for c in domain
-        if not ATEXT_HOSTNAME_INTL.match(c)
-    }
-    if bad_chars:
-        raise EmailSyntaxError("The part after the @-sign contains invalid characters after Unicode normalization: " + ", ".join(sorted(bad_chars)) + ".")
-
-    # The domain part is made up of dot-separated "labels." Each label must
-    # have at least one character and cannot start or end with dashes, which
-    # means there are some surprising restrictions on periods and dashes.
-    # Check that before we do IDNA encoding because the IDNA library gives
-    # unfriendly errors for these cases, but after UTS-46 normalization because
-    # it can insert periods and hyphens (from fullwidth characters).
- # (RFC 952, RFC 1123 2.1, RFC 5322 3.2.3) - check_dot_atom(domain, 'An email address cannot have a {} immediately after the @-sign.', 'An email address cannot end with a {}.', is_hostname=True) - - # Check for RFC 5890's invalid R-LDH labels, which are labels that start - # with two characters other than "xn" and two dashes. - for label in domain.split("."): - if re.match(r"(?!xn)..--", label, re.I): - raise EmailSyntaxError("An email address cannot have two letters followed by two dashes immediately after the @-sign or after a period, except Punycode.") - - if DOT_ATOM_TEXT_HOSTNAME.match(domain): - # This is a valid non-internationalized domain. - ascii_domain = domain - else: - # If international characters are present in the domain name, convert - # the domain to IDNA ASCII. If internationalized characters are present, - # the MTA must either support SMTPUTF8 or the mail client must convert the - # domain name to IDNA before submission. - # - # For ASCII-only domains, the transformation does nothing and is safe to - # apply. However, to ensure we don't rely on the idna library for basic - # syntax checks, we don't use it if it's not needed. - # - # idna.encode also checks the domain name length after encoding but it - # doesn't give a nice error, so we call the underlying idna.alabel method - # directly. idna.alabel checks label length and doesn't give great messages, - # but we can't easily go to lower level methods. - try: - ascii_domain = ".".join( - idna.alabel(label).decode("ascii") - for label in domain.split(".") - ) - except idna.IDNAError as e: - # Some errors would have already been raised by idna.uts46_remap. - raise EmailSyntaxError(f"The part after the @-sign is invalid ({e}).") from e - - # Check the syntax of the string returned by idna.encode. - # It should never fail. - if not DOT_ATOM_TEXT_HOSTNAME.match(ascii_domain): - raise EmailSyntaxError("The email address contains invalid characters after the @-sign after IDNA encoding.") - - # Check the length of the domain name in bytes. - # (RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2) - # We're checking the number of bytes ("octets") here, which can be much - # higher than the number of characters in internationalized domains, - # on the assumption that the domain may be transmitted without SMTPUTF8 - # as IDNA ASCII. (This is also checked by idna.encode, so this exception - # is never reached for internationalized domains.) - if len(ascii_domain) > DOMAIN_MAX_LENGTH: - if ascii_domain == original_domain: - reason = get_length_reason(ascii_domain, limit=DOMAIN_MAX_LENGTH) - raise EmailSyntaxError(f"The email address is too long after the @-sign {reason}.") - else: - diff = len(ascii_domain) - DOMAIN_MAX_LENGTH - s = "" if diff == 1 else "s" - raise EmailSyntaxError(f"The email address is too long after the @-sign ({diff} byte{s} too many after IDNA encoding).") - - # Also check the label length limit. - # (RFC 1035 2.3.1) - for label in ascii_domain.split("."): - if len(label) > DNS_LABEL_LENGTH_LIMIT: - reason = get_length_reason(label, limit=DNS_LABEL_LENGTH_LIMIT) - raise EmailSyntaxError(f"After the @-sign, periods cannot be separated by so many characters {reason}.") - - if globally_deliverable: - # All publicly deliverable addresses have domain names with at least - # one period, at least for gTLDs created since 2013 (per the ICANN Board - # New gTLD Program Committee, https://www.icann.org/en/announcements/details/new-gtld-dotless-domain-names-prohibited-30-8-2013-en). 
- # We'll consider the lack of a period a syntax error - # since that will match people's sense of what an email address looks - # like. We'll skip this in test environments to allow '@test' email - # addresses. - if "." not in ascii_domain and not (ascii_domain == "test" and test_environment): - raise EmailSyntaxError("The part after the @-sign is not valid. It should have a period.") - - # We also know that all TLDs currently end with a letter. - if not DOMAIN_NAME_REGEX.search(ascii_domain): - raise EmailSyntaxError("The part after the @-sign is not valid. It is not within a valid top-level domain.") - - # Check special-use and reserved domain names. - # Some might fail DNS-based deliverability checks, but that - # can be turned off, so we should fail them all sooner. - # See the references in __init__.py. - from . import SPECIAL_USE_DOMAIN_NAMES - for d in SPECIAL_USE_DOMAIN_NAMES: - # See the note near the definition of SPECIAL_USE_DOMAIN_NAMES. - if d == "test" and test_environment: - continue - - if ascii_domain == d or ascii_domain.endswith("." + d): - raise EmailSyntaxError("The part after the @-sign is a special-use or reserved name that cannot be used with email.") - - # We may have been given an IDNA ASCII domain to begin with. Check - # that the domain actually conforms to IDNA. It could look like IDNA - # but not be actual IDNA. For ASCII-only domains, the conversion out - # of IDNA just gives the same thing back. - # - # This gives us the canonical internationalized form of the domain, - # which we return to the caller as a part of the normalized email - # address. - try: - domain_i18n = idna.decode(ascii_domain.encode('ascii')) - except idna.IDNAError as e: - raise EmailSyntaxError(f"The part after the @-sign is not valid IDNA ({e}).") from e - - # Check that this normalized domain name has not somehow become - # an invalid domain name. All of the checks before this point - # using the idna package probably guarantee that we now have - # a valid international domain name in most respects. But it - # doesn't hurt to re-apply some tests to be sure. See the similar - # tests above. - - # Check for invalid and unsafe characters. We have no test - # case for this. - bad_chars = { - safe_character_display(c) - for c in domain - if not ATEXT_HOSTNAME_INTL.match(c) - } - if bad_chars: - raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") - check_unsafe_chars(domain) - - # Check that it can be encoded back to IDNA ASCII. We have no test - # case for this. - try: - idna.encode(domain_i18n) - except idna.IDNAError as e: - raise EmailSyntaxError(f"The part after the @-sign became invalid after normalizing to international characters ({e}).") from e - - # Return the IDNA ASCII-encoded form of the domain, which is how it - # would be transmitted on the wire (except when used with SMTPUTF8 - # possibly), as well as the canonical Unicode form of the domain, - # which is better for display purposes. This should also take care - # of RFC 6532 section 3.1's suggestion to apply Unicode NFC - # normalization to addresses. - return { - "ascii_domain": ascii_domain, - "domain": domain_i18n, - } - - -def validate_email_length(addrinfo: ValidatedEmail) -> None: - # There are three forms of the email address whose length must be checked: - # - # 1) The original email address string. 
Since callers may continue to use
-    #    this string, even though we recommend using the normalized form, we
-    #    should not pass validation when the original input is not valid. This
-    #    form is checked first because it is the original input.
-    # 2) The normalized email address. We perform Unicode NFC normalization of
-    #    the local part, we normalize the domain to internationalized characters
-    #    (if originally IDNA ASCII) which also includes Unicode normalization,
-    #    and we may remove quotes in quoted local parts. We recommend that
-    #    callers use this string, so it must be valid.
-    # 3) The email address with the IDNA ASCII representation of the domain
-    #    name, since this string may be used with email stacks that don't
-    #    support UTF-8. Since this is the least likely to be used by callers,
-    #    it is checked last. Note that ascii_email will only be set if the
-    #    local part is ASCII, but conceivably the caller may combine an
-    #    internationalized local part with an ASCII domain, so we check this
-    #    on that combination also. Since we only return the normalized local
-    #    part, we use that (and not the unnormalized local part).
-    #
-    # In all cases, the length is checked in UTF-8 because the SMTPUTF8
-    # extension to SMTP validates the length in bytes.
-
-    addresses_to_check = [
-        (addrinfo.original, None),
-        (addrinfo.normalized, "after normalization"),
-        ((addrinfo.ascii_local_part or addrinfo.local_part or "") + "@" + addrinfo.ascii_domain, "when the part after the @-sign is converted to IDNA ASCII"),
-    ]
-
-    for addr, reason in addresses_to_check:
-        addr_len = len(addr)
-        addr_utf8_len = len(addr.encode("utf8"))
-        diff = addr_utf8_len - EMAIL_MAX_LENGTH
-        if diff > 0:
-            if reason is None and addr_len == addr_utf8_len:
-                # If there is no normalization or transcoding,
-                # we can give a simple count of the number of
-                # characters over the limit.
-                reason = get_length_reason(addr, limit=EMAIL_MAX_LENGTH)
-            elif reason is None:
-                # If there is no normalization but there is
-                # some transcoding to UTF-8, we can compute
-                # the minimum number of characters over the
-                # limit by dividing the number of bytes over
-                # the limit by the maximum number of bytes
-                # per character.
-                mbpc = max(len(c.encode("utf8")) for c in addr)
-                mchars = max(1, diff // mbpc)
-                suffix = "s" if diff > 1 else ""
-                if mchars == diff:
-                    reason = f"({diff} character{suffix} too many)"
-                else:
-                    reason = f"({mchars}-{diff} character{suffix} too many)"
-            else:
-                # Since there is normalization, the number of
-                # characters in the input that need to change is
-                # impossible to know.
-                suffix = "s" if diff > 1 else ""
-                reason += f" ({diff} byte{suffix} too many)"
-            raise EmailSyntaxError(f"The email address is too long {reason}.")
-
-
-class DomainLiteralValidationResult(TypedDict):
-    domain_address: Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
-    domain: str
-
-
-def validate_email_domain_literal(domain_literal: str) -> DomainLiteralValidationResult:
-    # This is obscure domain-literal syntax. Parse it and return
-    # a compressed/normalized address.
-    # RFC 5321 4.1.3 and RFC 5322 3.4.1.
-
-    addr: Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
-
-    # Try to parse the domain literal as an IPv4 address.
-    # There is no tag for IPv4 addresses, so we can never
-    # be sure if the user intends an IPv4 address.
- if re.match(r"^[0-9\.]+$", domain_literal): - try: - addr = ipaddress.IPv4Address(domain_literal) - except ValueError as e: - raise EmailSyntaxError(f"The address in brackets after the @-sign is not valid: It is not an IPv4 address ({e}) or is missing an address literal tag.") from e - - # Return the IPv4Address object and the domain back unchanged. - return { - "domain_address": addr, - "domain": f"[{addr}]", - } - - # If it begins with "IPv6:" it's an IPv6 address. - if domain_literal.startswith("IPv6:"): - try: - addr = ipaddress.IPv6Address(domain_literal[5:]) - except ValueError as e: - raise EmailSyntaxError(f"The IPv6 address in brackets after the @-sign is not valid ({e}).") from e - - # Return the IPv6Address object and construct a normalized - # domain literal. - return { - "domain_address": addr, - "domain": f"[IPv6:{addr.compressed}]", - } - - # Nothing else is valid. - - if ":" not in domain_literal: - raise EmailSyntaxError("The part after the @-sign in brackets is not an IPv4 address and has no address literal tag.") - - # The tag (the part before the colon) has character restrictions, - # but since it must come from a registry of tags (in which only "IPv6" is defined), - # there's no need to check the syntax of the tag. See RFC 5321 4.1.2. - - # Check for permitted ASCII characters. This actually doesn't matter - # since there will be an exception after anyway. - bad_chars = { - safe_character_display(c) - for c in domain_literal - if not DOMAIN_LITERAL_CHARS.match(c) - } - if bad_chars: - raise EmailSyntaxError("The part after the @-sign contains invalid characters in brackets: " + ", ".join(sorted(bad_chars)) + ".") - - # There are no other domain literal tags. - # https://www.iana.org/assignments/address-literal-tags/address-literal-tags.xhtml - raise EmailSyntaxError("The part after the @-sign contains an invalid address literal tag in brackets.") diff --git a/venv/lib/python3.12/site-packages/email_validator/validate_email.py b/venv/lib/python3.12/site-packages/email_validator/validate_email.py deleted file mode 100644 index a134c77d..00000000 --- a/venv/lib/python3.12/site-packages/email_validator/validate_email.py +++ /dev/null @@ -1,180 +0,0 @@ -from typing import Optional, Union, TYPE_CHECKING -import unicodedata - -from .exceptions_types import EmailSyntaxError, ValidatedEmail -from .syntax import split_email, validate_email_local_part, validate_email_domain_name, validate_email_domain_literal, validate_email_length -from .rfc_constants import CASE_INSENSITIVE_MAILBOX_NAMES - -if TYPE_CHECKING: - import dns.resolver - _Resolver = dns.resolver.Resolver -else: - _Resolver = object - - -def validate_email( - email: Union[str, bytes], - /, # prior arguments are positional-only - *, # subsequent arguments are keyword-only - allow_smtputf8: Optional[bool] = None, - allow_empty_local: bool = False, - allow_quoted_local: Optional[bool] = None, - allow_domain_literal: Optional[bool] = None, - allow_display_name: Optional[bool] = None, - check_deliverability: Optional[bool] = None, - test_environment: Optional[bool] = None, - globally_deliverable: Optional[bool] = None, - timeout: Optional[int] = None, - dns_resolver: Optional[_Resolver] = None -) -> ValidatedEmail: - """ - Given an email address, and some options, returns a ValidatedEmail instance - with information about the address if it is valid or, if the address is not - valid, raises an EmailNotValidError. This is the main function of the module. - """ - - # Fill in default values of arguments. - from . 
import ALLOW_SMTPUTF8, ALLOW_QUOTED_LOCAL, ALLOW_DOMAIN_LITERAL, ALLOW_DISPLAY_NAME, \ - GLOBALLY_DELIVERABLE, CHECK_DELIVERABILITY, TEST_ENVIRONMENT, DEFAULT_TIMEOUT - if allow_smtputf8 is None: - allow_smtputf8 = ALLOW_SMTPUTF8 - if allow_quoted_local is None: - allow_quoted_local = ALLOW_QUOTED_LOCAL - if allow_domain_literal is None: - allow_domain_literal = ALLOW_DOMAIN_LITERAL - if allow_display_name is None: - allow_display_name = ALLOW_DISPLAY_NAME - if check_deliverability is None: - check_deliverability = CHECK_DELIVERABILITY - if test_environment is None: - test_environment = TEST_ENVIRONMENT - if globally_deliverable is None: - globally_deliverable = GLOBALLY_DELIVERABLE - if timeout is None and dns_resolver is None: - timeout = DEFAULT_TIMEOUT - - # Allow email to be a str or bytes instance. If bytes, - # it must be ASCII because that's how the bytes work - # on the wire with SMTP. - if not isinstance(email, str): - try: - email = email.decode("ascii") - except ValueError as e: - raise EmailSyntaxError("The email address is not valid ASCII.") from e - - # Split the address into the display name (or None), the local part - # (before the @-sign), and the domain part (after the @-sign). - # Normally, there is only one @-sign. But the awkward "quoted string" - # local part form (RFC 5321 4.1.2) allows @-signs in the local - # part if the local part is quoted. - display_name, local_part, domain_part, is_quoted_local_part \ - = split_email(email) - - # Collect return values in this instance. - ret = ValidatedEmail() - ret.original = ((local_part if not is_quoted_local_part - else ('"' + local_part + '"')) - + "@" + domain_part) # drop the display name, if any, for email length tests at the end - ret.display_name = display_name - - # Validate the email address's local part syntax and get a normalized form. - # If the original address was quoted and the decoded local part is a valid - # unquoted local part, then we'll get back a normalized (unescaped) local - # part. - local_part_info = validate_email_local_part(local_part, - allow_smtputf8=allow_smtputf8, - allow_empty_local=allow_empty_local, - quoted_local_part=is_quoted_local_part) - ret.local_part = local_part_info["local_part"] - ret.ascii_local_part = local_part_info["ascii_local_part"] - ret.smtputf8 = local_part_info["smtputf8"] - - # RFC 6532 section 3.1 says that Unicode NFC normalization should be applied, - # so we'll return the NFC-normalized local part. Since the caller may use that - # string in place of the original string, ensure it is also valid. - normalized_local_part = unicodedata.normalize("NFC", ret.local_part) - if normalized_local_part != ret.local_part: - try: - validate_email_local_part(normalized_local_part, - allow_smtputf8=allow_smtputf8, - allow_empty_local=allow_empty_local, - quoted_local_part=is_quoted_local_part) - except EmailSyntaxError as e: - raise EmailSyntaxError("After Unicode normalization: " + str(e)) from e - ret.local_part = normalized_local_part - - # If a quoted local part isn't allowed but is present, now raise an exception. - # This is done after any exceptions raised by validate_email_local_part so - # that mandatory checks have highest precedence. - if is_quoted_local_part and not allow_quoted_local: - raise EmailSyntaxError("Quoting the part before the @-sign is not allowed here.") - - # Some local parts are required to be case-insensitive, so we should normalize - # to lowercase. 
- # RFC 2142 - if ret.ascii_local_part is not None \ - and ret.ascii_local_part.lower() in CASE_INSENSITIVE_MAILBOX_NAMES \ - and ret.local_part is not None: - ret.ascii_local_part = ret.ascii_local_part.lower() - ret.local_part = ret.local_part.lower() - - # Validate the email address's domain part syntax and get a normalized form. - is_domain_literal = False - if len(domain_part) == 0: - raise EmailSyntaxError("There must be something after the @-sign.") - - elif domain_part.startswith("[") and domain_part.endswith("]"): - # Parse the address in the domain literal and get back a normalized domain. - domain_literal_info = validate_email_domain_literal(domain_part[1:-1]) - if not allow_domain_literal: - raise EmailSyntaxError("A bracketed IP address after the @-sign is not allowed here.") - ret.domain = domain_literal_info["domain"] - ret.ascii_domain = domain_literal_info["domain"] # Domain literals are always ASCII. - ret.domain_address = domain_literal_info["domain_address"] - is_domain_literal = True # Prevent deliverability checks. - - else: - # Check the syntax of the domain and get back a normalized - # internationalized and ASCII form. - domain_name_info = validate_email_domain_name(domain_part, test_environment=test_environment, globally_deliverable=globally_deliverable) - ret.domain = domain_name_info["domain"] - ret.ascii_domain = domain_name_info["ascii_domain"] - - # Construct the complete normalized form. - ret.normalized = ret.local_part + "@" + ret.domain - - # If the email address has an ASCII form, add it. - if not ret.smtputf8: - if not ret.ascii_domain: - raise Exception("Missing ASCII domain.") - ret.ascii_email = (ret.ascii_local_part or "") + "@" + ret.ascii_domain - else: - ret.ascii_email = None - - # Check the length of the address. - validate_email_length(ret) - - # Check that a display name is permitted. It's the last syntax check - # because we always check against optional parsing features last. - if display_name is not None and not allow_display_name: - raise EmailSyntaxError("A display name and angle brackets around the email address are not permitted here.") - - if check_deliverability and not test_environment: - # Validate the email address's deliverability using DNS - # and update the returned ValidatedEmail object with metadata. - - if is_domain_literal: - # There is nothing to check --- skip deliverability checks. 
- return ret - - # Lazy load `deliverability` as it is slow to import (due to dns.resolver) - from .deliverability import validate_email_deliverability - deliverability_info = validate_email_deliverability( - ret.ascii_domain, ret.domain, timeout, dns_resolver - ) - mx = deliverability_info.get("mx") - if mx is not None: - ret.mx = mx - ret.mx_fallback_type = deliverability_info.get("mx_fallback_type") - - return ret diff --git a/venv/lib/python3.12/site-packages/email_validator/version.py b/venv/lib/python3.12/site-packages/email_validator/version.py deleted file mode 100644 index 8a124bf6..00000000 --- a/venv/lib/python3.12/site-packages/email_validator/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "2.2.0" diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/METADATA deleted file mode 100644 index fe82f2cb..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/METADATA +++ /dev/null @@ -1,575 +0,0 @@ -Metadata-Version: 2.1 -Name: fastapi -Version: 0.116.1 -Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production -Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?= -Classifier: Intended Audience :: Information Technology -Classifier: Intended Audience :: System Administrators -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python -Classifier: Topic :: Internet -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: Libraries -Classifier: Topic :: Software Development -Classifier: Typing :: Typed -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: FastAPI -Classifier: Framework :: Pydantic -Classifier: Framework :: Pydantic :: 1 -Classifier: Framework :: Pydantic :: 2 -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers -Classifier: Topic :: Internet :: WWW/HTTP -Project-URL: Homepage, https://github.com/fastapi/fastapi -Project-URL: Documentation, https://fastapi.tiangolo.com/ -Project-URL: Repository, https://github.com/fastapi/fastapi -Project-URL: Issues, https://github.com/fastapi/fastapi/issues -Project-URL: Changelog, https://fastapi.tiangolo.com/release-notes/ -Requires-Python: >=3.8 -Requires-Dist: starlette<0.48.0,>=0.40.0 -Requires-Dist: pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4 -Requires-Dist: typing-extensions>=4.8.0 -Provides-Extra: standard -Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "standard" 
-Requires-Dist: httpx>=0.23.0; extra == "standard" -Requires-Dist: jinja2>=3.1.5; extra == "standard" -Requires-Dist: python-multipart>=0.0.18; extra == "standard" -Requires-Dist: email-validator>=2.0.0; extra == "standard" -Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard" -Provides-Extra: standard-no-fastapi-cloud-cli -Requires-Dist: fastapi-cli[standard-no-fastapi-cloud-cli]>=0.0.8; extra == "standard-no-fastapi-cloud-cli" -Requires-Dist: httpx>=0.23.0; extra == "standard-no-fastapi-cloud-cli" -Requires-Dist: jinja2>=3.1.5; extra == "standard-no-fastapi-cloud-cli" -Requires-Dist: python-multipart>=0.0.18; extra == "standard-no-fastapi-cloud-cli" -Requires-Dist: email-validator>=2.0.0; extra == "standard-no-fastapi-cloud-cli" -Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard-no-fastapi-cloud-cli" -Provides-Extra: all -Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "all" -Requires-Dist: httpx>=0.23.0; extra == "all" -Requires-Dist: jinja2>=3.1.5; extra == "all" -Requires-Dist: python-multipart>=0.0.18; extra == "all" -Requires-Dist: itsdangerous>=1.1.0; extra == "all" -Requires-Dist: pyyaml>=5.3.1; extra == "all" -Requires-Dist: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1; extra == "all" -Requires-Dist: orjson>=3.2.1; extra == "all" -Requires-Dist: email-validator>=2.0.0; extra == "all" -Requires-Dist: uvicorn[standard]>=0.12.0; extra == "all" -Requires-Dist: pydantic-settings>=2.0.0; extra == "all" -Requires-Dist: pydantic-extra-types>=2.0.0; extra == "all" -Description-Content-Type: text/markdown - -

-[FastAPI logo]
-
-FastAPI framework, high performance, easy to learn, fast to code, ready for production
-
-[badges: Test, Coverage, Package version, Supported Python versions]

-
----
-
-**Documentation**: https://fastapi.tiangolo.com
-
-**Source Code**: https://github.com/fastapi/fastapi
-
----
-
-FastAPI is a modern, fast (high-performance) web framework for building APIs with Python based on standard Python type hints.
-
-The key features are:
-
-* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance).
-* **Fast to code**: Increase the speed to develop features by about 200% to 300%. *
-* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. *
-* **Intuitive**: Great editor support. Completion everywhere. Less time debugging.
-* **Easy**: Designed to be easy to use and learn. Less time reading docs.
-* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs.
-* **Robust**: Get production-ready code. With automatic interactive documentation.
-* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema.
-
-* estimation based on tests on an internal development team, building production applications.
-
-## Sponsors
-
-[sponsor logos]
-
-Other sponsors
-
-## Opinions
-
-"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._"
-
Kabir Khan - Microsoft (ref)
- ---- - -"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" - -
Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
- ---- - -"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" - -
Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
- ---- - -"_I’m over the moon excited about **FastAPI**. It’s so fun!_" - -
Brian Okken - Python Bytes podcast host (ref)
- ---- - -"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" - -
Timothy Crosley - Hug creator (ref)
- ---- - -"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" - -"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" - -
Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
- ---- - -"_If anyone is looking to build a production Python API, I would highly recommend **FastAPI**. It is **beautifully designed**, **simple to use** and **highly scalable**, it has become a **key component** in our API first development strategy and is driving many automations and services such as our Virtual TAC Engineer._" - -
Deon Pillsbury - Cisco (ref)
- ---- - -## **Typer**, the FastAPI of CLIs - - - -If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. - -**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 - -## Requirements - -FastAPI stands on the shoulders of giants: - -* Starlette for the web parts. -* Pydantic for the data parts. - -## Installation - -Create and activate a virtual environment and then install FastAPI: - -
- -```console -$ pip install "fastapi[standard]" - ----> 100% -``` - -
- -**Note**: Make sure you put `"fastapi[standard]"` in quotes to ensure it works in all terminals. - -## Example - -### Create it - -Create a file `main.py` with: - -```Python -from typing import Union - -from fastapi import FastAPI - -app = FastAPI() - - -@app.get("/") -def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} -``` - -
-Or use async def... - -If your code uses `async` / `await`, use `async def`: - -```Python hl_lines="9 14" -from typing import Union - -from fastapi import FastAPI - -app = FastAPI() - - -@app.get("/") -async def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -async def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} -``` - -**Note**: - -If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. - -
- -### Run it - -Run the server with: - -
- -```console -$ fastapi dev main.py - - ╭────────── FastAPI CLI - Development mode ───────────╮ - │ │ - │ Serving at: http://127.0.0.1:8000 │ - │ │ - │ API docs: http://127.0.0.1:8000/docs │ - │ │ - │ Running in development mode, for production use: │ - │ │ - │ fastapi run │ - │ │ - ╰─────────────────────────────────────────────────────╯ - -INFO: Will watch for changes in these directories: ['/home/user/code/awesomeapp'] -INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) -INFO: Started reloader process [2248755] using WatchFiles -INFO: Started server process [2248757] -INFO: Waiting for application startup. -INFO: Application startup complete. -``` - -
- -
-About the command fastapi dev main.py... - -The command `fastapi dev` reads your `main.py` file, detects the **FastAPI** app in it, and starts a server using Uvicorn. - -By default, `fastapi dev` will start with auto-reload enabled for local development. - -You can read more about it in the FastAPI CLI docs. - -
- -### Check it - -Open your browser at http://127.0.0.1:8000/items/5?q=somequery. - -You will see the JSON response as: - -```JSON -{"item_id": 5, "q": "somequery"} -``` - -You already created an API that: - -* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. -* Both _paths_ take `GET` operations (also known as HTTP _methods_). -* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. -* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. - -### Interactive API docs - -Now go to http://127.0.0.1:8000/docs. - -You will see the automatic interactive API documentation (provided by Swagger UI): - -![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) - -### Alternative API docs - -And now, go to http://127.0.0.1:8000/redoc. - -You will see the alternative automatic documentation (provided by ReDoc): - -![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) - -## Example upgrade - -Now modify the file `main.py` to receive a body from a `PUT` request. - -Declare the body using standard Python types, thanks to Pydantic. - -```Python hl_lines="4 9-12 25-27" -from typing import Union - -from fastapi import FastAPI -from pydantic import BaseModel - -app = FastAPI() - - -class Item(BaseModel): - name: str - price: float - is_offer: Union[bool, None] = None - - -@app.get("/") -def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} - - -@app.put("/items/{item_id}") -def update_item(item_id: int, item: Item): - return {"item_name": item.name, "item_id": item_id} -``` - -The `fastapi dev` server should reload automatically. - -### Interactive API docs upgrade - -Now go to http://127.0.0.1:8000/docs. - -* The interactive API documentation will be automatically updated, including the new body: - -![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) - -* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: - -![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) - -* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: - -![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) - -### Alternative API docs upgrade - -And now, go to http://127.0.0.1:8000/redoc. - -* The alternative documentation will also reflect the new query parameter and body: - -![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) - -### Recap - -In summary, you declare **once** the types of parameters, body, etc. as function parameters. - -You do that with standard modern Python types. - -You don't have to learn a new syntax, the methods or classes of a specific library, etc. - -Just standard **Python**. - -For example, for an `int`: - -```Python -item_id: int -``` - -or for a more complex `Item` model: - -```Python -item: Item -``` - -...and with that single declaration you get: - -* Editor support, including: - * Completion. - * Type checks. -* Validation of data: - * Automatic and clear errors when the data is invalid. - * Validation even for deeply nested JSON objects. -* Conversion of input data: coming from the network to Python data and types. Reading from: - * JSON. - * Path parameters. - * Query parameters. - * Cookies. 
- * Headers. - * Forms. - * Files. -* Conversion of output data: converting from Python data and types to network data (as JSON): - * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). - * `datetime` objects. - * `UUID` objects. - * Database models. - * ...and many more. -* Automatic interactive API documentation, including 2 alternative user interfaces: - * Swagger UI. - * ReDoc. - ---- - -Coming back to the previous code example, **FastAPI** will: - -* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. -* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. - * If it is not, the client will see a useful, clear error. -* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. - * As the `q` parameter is declared with `= None`, it is optional. - * Without the `None` it would be required (as is the body in the case with `PUT`). -* For `PUT` requests to `/items/{item_id}`, read the body as JSON: - * Check that it has a required attribute `name` that should be a `str`. - * Check that it has a required attribute `price` that has to be a `float`. - * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. - * All this would also work for deeply nested JSON objects. -* Convert from and to JSON automatically. -* Document everything with OpenAPI, that can be used by: - * Interactive documentation systems. - * Automatic client code generation systems, for many languages. -* Provide 2 interactive documentation web interfaces directly. - ---- - -We just scratched the surface, but you already get the idea of how it all works. - -Try changing the line with: - -```Python - return {"item_name": item.name, "item_id": item_id} -``` - -...from: - -```Python - ... "item_name": item.name ... -``` - -...to: - -```Python - ... "item_price": item.price ... -``` - -...and see how your editor will auto-complete the attributes and know their types: - -![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) - -For a more complete example including more features, see the Tutorial - User Guide. - -**Spoiler alert**: the tutorial - user guide includes: - -* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. -* How to set **validation constraints** as `maximum_length` or `regex`. -* A very powerful and easy to use **Dependency Injection** system. -* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. -* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). -* **GraphQL** integration with Strawberry and other libraries. -* Many extra features (thanks to Starlette) as: - * **WebSockets** - * extremely easy tests based on HTTPX and `pytest` - * **CORS** - * **Cookie Sessions** - * ...and more. - -## Performance - -Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) - -To understand more about it, see the section Benchmarks. - -## Dependencies - -FastAPI depends on Pydantic and Starlette. 
-
-### `standard` Dependencies
-
-When you install FastAPI with `pip install "fastapi[standard]"`, it comes with the `standard` group of optional dependencies:
-
-Used by Pydantic:
-
-* email-validator - for email validation.
-
-Used by Starlette:
-
-* httpx - Required if you want to use the `TestClient`.
-* jinja2 - Required if you want to use the default template configuration.
-* python-multipart - Required if you want to support form "parsing", with `request.form()`.
-
-Used by FastAPI:
-
-* uvicorn - the server that loads and serves your application. This includes `uvicorn[standard]`, which in turn includes some dependencies (e.g. `uvloop`) needed for high-performance serving.
-* `fastapi-cli[standard]` - to provide the `fastapi` command.
-    * This includes `fastapi-cloud-cli`, which allows you to deploy your FastAPI application to FastAPI Cloud.
-
-### Without `standard` Dependencies
-
-If you don't want to include the `standard` optional dependencies, you can install with `pip install fastapi` instead of `pip install "fastapi[standard]"`.
-
-### Without `fastapi-cloud-cli`
-
-If you want to install FastAPI with the standard dependencies but without `fastapi-cloud-cli`, you can install with `pip install "fastapi[standard-no-fastapi-cloud-cli]"`.
-
-### Additional Optional Dependencies
-
-There are some additional dependencies you might want to install.
-
-Additional optional Pydantic dependencies:
-
-* pydantic-settings - for settings management.
-* pydantic-extra-types - for extra types to be used with Pydantic.
-
-Additional optional FastAPI dependencies:
-
-* orjson - Required if you want to use `ORJSONResponse` (a short sketch follows the License section).
-* ujson - Required if you want to use `UJSONResponse`.
-
-## License
-
-This project is licensed under the terms of the MIT license.
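-
-As referenced in the optional dependencies above, a minimal sketch of using `ORJSONResponse`, assuming the optional `orjson` package is installed:
-
-```Python
-from fastapi import FastAPI
-from fastapi.responses import ORJSONResponse  # requires the optional orjson package
-
-app = FastAPI(default_response_class=ORJSONResponse)
-
-
-@app.get("/items/")
-async def read_items():
-    # Serialized with orjson, which is typically faster than the
-    # default JSON encoder, especially for large responses.
-    return [{"item_id": "Foo"}]
-```
-
-Setting `default_response_class` this way applies orjson serialization to every *path operation*, without changing any of the function bodies.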
diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/RECORD deleted file mode 100644 index 95a390ad..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/RECORD +++ /dev/null @@ -1,97 +0,0 @@ -../../../bin/fastapi,sha256=GCxYfrTKZOMvDEpyNE1ruphc-yxWZF92akImUptQ86g,238 -fastapi-0.116.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -fastapi-0.116.1.dist-info/METADATA,sha256=pnu4s6rAsNuB66sYhB59UFxRuZv2ua6fbH_jUM6HM2k,28115 -fastapi-0.116.1.dist-info/RECORD,, -fastapi-0.116.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi-0.116.1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90 -fastapi-0.116.1.dist-info/entry_points.txt,sha256=GCf-WbIZxyGT4MUmrPGj1cOHYZoGsNPHAvNkT6hnGeA,61 -fastapi-0.116.1.dist-info/licenses/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 -fastapi/__init__.py,sha256=-U8vW9K3Hy78v_3O0ECrEfMmPtSuHaA1yAql96bd8ts,1081 -fastapi/__main__.py,sha256=bKePXLdO4SsVSM6r9SVoLickJDcR2c0cTOxZRKq26YQ,37 -fastapi/__pycache__/__init__.cpython-312.pyc,, -fastapi/__pycache__/__main__.cpython-312.pyc,, -fastapi/__pycache__/_compat.cpython-312.pyc,, -fastapi/__pycache__/applications.cpython-312.pyc,, -fastapi/__pycache__/background.cpython-312.pyc,, -fastapi/__pycache__/cli.cpython-312.pyc,, -fastapi/__pycache__/concurrency.cpython-312.pyc,, -fastapi/__pycache__/datastructures.cpython-312.pyc,, -fastapi/__pycache__/encoders.cpython-312.pyc,, -fastapi/__pycache__/exception_handlers.cpython-312.pyc,, -fastapi/__pycache__/exceptions.cpython-312.pyc,, -fastapi/__pycache__/logger.cpython-312.pyc,, -fastapi/__pycache__/param_functions.cpython-312.pyc,, -fastapi/__pycache__/params.cpython-312.pyc,, -fastapi/__pycache__/requests.cpython-312.pyc,, -fastapi/__pycache__/responses.cpython-312.pyc,, -fastapi/__pycache__/routing.cpython-312.pyc,, -fastapi/__pycache__/staticfiles.cpython-312.pyc,, -fastapi/__pycache__/templating.cpython-312.pyc,, -fastapi/__pycache__/testclient.cpython-312.pyc,, -fastapi/__pycache__/types.cpython-312.pyc,, -fastapi/__pycache__/utils.cpython-312.pyc,, -fastapi/__pycache__/websockets.cpython-312.pyc,, -fastapi/_compat.py,sha256=PwGTZd6d-u2o6YF9M8pQahuBtD_3q3Kpj7vU5-ngChc,24228 -fastapi/applications.py,sha256=rZTr0Ix-vdMwh6MQGCI_NC-Ir9lpfIGHHBY-JnNWZ_E,176550 -fastapi/background.py,sha256=rouLirxUANrcYC824MSMypXL_Qb2HYg2YZqaiEqbEKI,1768 -fastapi/cli.py,sha256=OYhZb0NR_deuT5ofyPF2NoNBzZDNOP8Salef2nk-HqA,418 -fastapi/concurrency.py,sha256=MirfowoSpkMQZ8j_g0ZxaQKpV6eB3G-dB5TgcXCrgEA,1424 -fastapi/datastructures.py,sha256=b2PEz77XGq-u3Ur1Inwk0AGjOsQZO49yF9C7IPJ15cY,5766 -fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/dependencies/__pycache__/__init__.cpython-312.pyc,, -fastapi/dependencies/__pycache__/models.cpython-312.pyc,, -fastapi/dependencies/__pycache__/utils.cpython-312.pyc,, -fastapi/dependencies/models.py,sha256=Pjl6vx-4nZ5Tta9kJa3-RfQKkXtCpS09-FhMgs9eWNs,1507 -fastapi/dependencies/utils.py,sha256=wGN-BAb0NpG-89nA_OllS0F4wYwGfhHgb8IuT3MTqck,36619 -fastapi/encoders.py,sha256=LvwYmFeOz4tVwvgBoC5rvZnbr7hZr73KGrU8O7zSptU,11068 -fastapi/exception_handlers.py,sha256=MBrIOA-ugjJDivIi4rSsUJBdTsjuzN76q4yh0q1COKw,1332 -fastapi/exceptions.py,sha256=taNixuFEXb67lI1bnX1ubq8y8TseJ4yoPlWjyP0fTzk,4969 -fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 
-fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 -fastapi/middleware/__pycache__/__init__.cpython-312.pyc,, -fastapi/middleware/__pycache__/cors.cpython-312.pyc,, -fastapi/middleware/__pycache__/gzip.cpython-312.pyc,, -fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc,, -fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc,, -fastapi/middleware/__pycache__/wsgi.cpython-312.pyc,, -fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 -fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 -fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 -fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 -fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 -fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/openapi/__pycache__/__init__.cpython-312.pyc,, -fastapi/openapi/__pycache__/constants.cpython-312.pyc,, -fastapi/openapi/__pycache__/docs.cpython-312.pyc,, -fastapi/openapi/__pycache__/models.cpython-312.pyc,, -fastapi/openapi/__pycache__/utils.cpython-312.pyc,, -fastapi/openapi/constants.py,sha256=adGzmis1L1HJRTE3kJ5fmHS_Noq6tIY6pWv_SFzoFDU,153 -fastapi/openapi/docs.py,sha256=zSDv4xY6XHcKsaG4zyk1HqSnrZtfZFBB0J7ZBk5YHPE,10345 -fastapi/openapi/models.py,sha256=PqkxQiqcEgjKuhfUIWPZPQcyTcubtUCB3vcObLsB7VE,15397 -fastapi/openapi/utils.py,sha256=e00G_p0IdpiffBUaq31BUyiloXbpld8RryKYnYKisdY,23964 -fastapi/param_functions.py,sha256=JHNPLIYvoAwdnZZavIVsxOat8x23fX_Kl33reh7HKl8,64019 -fastapi/params.py,sha256=g450axUBQgQJODdtM7WBxZbQj9Z64inFvadrgHikBbU,28237 -fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 -fastapi/responses.py,sha256=QNQQlwpKhQoIPZTTWkpc9d_QGeGZ_aVQPaDV3nQ8m7c,1761 -fastapi/routing.py,sha256=-SaOgqaseKw5mlTCk-FliS6Wx5la_CjdV5FqSPDmW9g,176337 -fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 -fastapi/security/__pycache__/__init__.cpython-312.pyc,, -fastapi/security/__pycache__/api_key.cpython-312.pyc,, -fastapi/security/__pycache__/base.cpython-312.pyc,, -fastapi/security/__pycache__/http.cpython-312.pyc,, -fastapi/security/__pycache__/oauth2.cpython-312.pyc,, -fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc,, -fastapi/security/__pycache__/utils.cpython-312.pyc,, -fastapi/security/api_key.py,sha256=cBI5Z4zWVjL1uJrsjTeLy7MafHPAO2HQPzTrpyoIYWA,9094 -fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 -fastapi/security/http.py,sha256=rWR2x-5CUsjWmRucYthwRig6MG1o-boyrr4Xo-PuuxU,13606 -fastapi/security/oauth2.py,sha256=M1AFIDT7G3oQChq83poI3eg8ZDeibcvnGmya2CTS7JY,22036 -fastapi/security/open_id_connect_url.py,sha256=8vizZ2tGqEp1ur8SwtVgyHJhGAJ5AqahgcvSpaIioDI,2722 -fastapi/security/utils.py,sha256=bd8T0YM7UQD5ATKucr1bNtAvz_Y3__dVNAv5UebiPvc,293 -fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 -fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 -fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 -fastapi/types.py,sha256=nFb36sK3DSoqoyo7Miwy3meKK5UdFBgkAgLSzQlUVyI,383 -fastapi/utils.py,sha256=y8Bj5ttMaI9tS4D60OUgXqKnktBr99NdYUnHHV9LgoY,7948 -fastapi/websockets.py,sha256=419uncYObEKZG0YcrXscfQQYLSWoE10jqxVMetGdR98,222 diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/REQUESTED 
b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/REQUESTED deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/WHEEL deleted file mode 100644 index 045c8acd..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: pdm-backend (2.4.5) -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/entry_points.txt deleted file mode 100644 index b81849e1..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -fastapi = fastapi.cli:main - -[gui_scripts] - diff --git a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE deleted file mode 100644 index 3e92463e..00000000 --- a/venv/lib/python3.12/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Sebastián Ramírez - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/venv/lib/python3.12/site-packages/fastapi/__init__.py b/venv/lib/python3.12/site-packages/fastapi/__init__.py deleted file mode 100644 index b02bf8b4..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" - -__version__ = "0.116.1" - -from starlette import status as status - -from .applications import FastAPI as FastAPI -from .background import BackgroundTasks as BackgroundTasks -from .datastructures import UploadFile as UploadFile -from .exceptions import HTTPException as HTTPException -from .exceptions import WebSocketException as WebSocketException -from .param_functions import Body as Body -from .param_functions import Cookie as Cookie -from .param_functions import Depends as Depends -from .param_functions import File as File -from .param_functions import Form as Form -from .param_functions import Header as Header -from .param_functions import Path as Path -from .param_functions import Query as Query -from .param_functions import Security as Security -from .requests import Request as Request -from .responses import Response as Response -from .routing import APIRouter as APIRouter -from .websockets import WebSocket as WebSocket -from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/venv/lib/python3.12/site-packages/fastapi/__main__.py b/venv/lib/python3.12/site-packages/fastapi/__main__.py deleted file mode 100644 index fc36465f..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from fastapi.cli import main - -main() diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c9f1fc98..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/__main__.cpython-312.pyc deleted file mode 100644 index 1390fffd..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/__main__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc deleted file mode 100644 index 6955b109..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/_compat.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc deleted file mode 100644 index 260f9f8a..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/applications.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc deleted file mode 100644 index dc8d0972..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/background.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/cli.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/cli.cpython-312.pyc deleted file 
mode 100644 index 9bd59bd6..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/cli.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc deleted file mode 100644 index 9e934cc4..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/concurrency.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc deleted file mode 100644 index fdc25fbf..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/datastructures.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc deleted file mode 100644 index 7841d85e..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/encoders.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc deleted file mode 100644 index af0d3db5..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exception_handlers.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index c92ccabe..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc deleted file mode 100644 index 6677ac55..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/logger.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc deleted file mode 100644 index 2aaa87a8..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/param_functions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc deleted file mode 100644 index de4e50f4..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/params.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc deleted file mode 100644 index f590c1e9..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/requests.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc deleted file mode 100644 index b030fd91..00000000 Binary files 
a/venv/lib/python3.12/site-packages/fastapi/__pycache__/responses.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc deleted file mode 100644 index e6d88162..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/routing.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc deleted file mode 100644 index a2fd64e4..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/staticfiles.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc deleted file mode 100644 index 1f714cd8..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/templating.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc deleted file mode 100644 index 9df9034f..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/testclient.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc deleted file mode 100644 index 0c851651..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/types.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index c6adf3b2..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc deleted file mode 100644 index db45563b..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/__pycache__/websockets.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/_compat.py b/venv/lib/python3.12/site-packages/fastapi/_compat.py deleted file mode 100644 index 227ad837..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/_compat.py +++ /dev/null @@ -1,664 +0,0 @@ -from collections import deque -from copy import copy -from dataclasses import dataclass, is_dataclass -from enum import Enum -from functools import lru_cache -from typing import ( - Any, - Callable, - Deque, - Dict, - FrozenSet, - List, - Mapping, - Sequence, - Set, - Tuple, - Type, - Union, - cast, -) - -from fastapi.exceptions import RequestErrorModel -from fastapi.types import IncEx, ModelNameMap, UnionType -from pydantic import BaseModel, create_model -from pydantic.version import VERSION as PYDANTIC_VERSION -from starlette.datastructures import UploadFile -from typing_extensions import Annotated, Literal, get_args, get_origin - -PYDANTIC_VERSION_MINOR_TUPLE = tuple(int(x) for x in PYDANTIC_VERSION.split(".")[:2]) -PYDANTIC_V2 = PYDANTIC_VERSION_MINOR_TUPLE[0] == 2 - - -sequence_annotation_to_type = { 
- Sequence: list, - List: list, - list: list, - Tuple: tuple, - tuple: tuple, - Set: set, - set: set, - FrozenSet: frozenset, - frozenset: frozenset, - Deque: deque, - deque: deque, -} - -sequence_types = tuple(sequence_annotation_to_type.keys()) - -Url: Type[Any] - -if PYDANTIC_V2: - from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError - from pydantic import TypeAdapter - from pydantic import ValidationError as ValidationError - from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined] - GetJsonSchemaHandler as GetJsonSchemaHandler, - ) - from pydantic._internal._typing_extra import eval_type_lenient - from pydantic._internal._utils import lenient_issubclass as lenient_issubclass - from pydantic.fields import FieldInfo - from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema - from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue - from pydantic_core import CoreSchema as CoreSchema - from pydantic_core import PydanticUndefined, PydanticUndefinedType - from pydantic_core import Url as Url - - try: - from pydantic_core.core_schema import ( - with_info_plain_validator_function as with_info_plain_validator_function, - ) - except ImportError: # pragma: no cover - from pydantic_core.core_schema import ( - general_plain_validator_function as with_info_plain_validator_function, # noqa: F401 - ) - - RequiredParam = PydanticUndefined - Undefined = PydanticUndefined - UndefinedType = PydanticUndefinedType - evaluate_forwardref = eval_type_lenient - Validator = Any - - class BaseConfig: - pass - - class ErrorWrapper(Exception): - pass - - @dataclass - class ModelField: - field_info: FieldInfo - name: str - mode: Literal["validation", "serialization"] = "validation" - - @property - def alias(self) -> str: - a = self.field_info.alias - return a if a is not None else self.name - - @property - def required(self) -> bool: - return self.field_info.is_required() - - @property - def default(self) -> Any: - return self.get_default() - - @property - def type_(self) -> Any: - return self.field_info.annotation - - def __post_init__(self) -> None: - self._type_adapter: TypeAdapter[Any] = TypeAdapter( - Annotated[self.field_info.annotation, self.field_info] - ) - - def get_default(self) -> Any: - if self.field_info.is_required(): - return Undefined - return self.field_info.get_default(call_default_factory=True) - - def validate( - self, - value: Any, - values: Dict[str, Any] = {}, # noqa: B006 - *, - loc: Tuple[Union[int, str], ...] 
= (), - ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]: - try: - return ( - self._type_adapter.validate_python(value, from_attributes=True), - None, - ) - except ValidationError as exc: - return None, _regenerate_error_with_loc( - errors=exc.errors(include_url=False), loc_prefix=loc - ) - - def serialize( - self, - value: Any, - *, - mode: Literal["json", "python"] = "json", - include: Union[IncEx, None] = None, - exclude: Union[IncEx, None] = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - ) -> Any: - # What calls this code passes a value that already called - # self._type_adapter.validate_python(value) - return self._type_adapter.dump_python( - value, - mode=mode, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - def __hash__(self) -> int: - # Each ModelField is unique for our purposes, to allow making a dict from - # ModelField to its JSON Schema. - return id(self) - - def get_annotation_from_field_info( - annotation: Any, field_info: FieldInfo, field_name: str - ) -> Any: - return annotation - - def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: - return errors # type: ignore[return-value] - - def _model_rebuild(model: Type[BaseModel]) -> None: - model.model_rebuild() - - def _model_dump( - model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any - ) -> Any: - return model.model_dump(mode=mode, **kwargs) - - def _get_model_config(model: BaseModel) -> Any: - return model.model_config - - def get_schema_from_model_field( - *, - field: ModelField, - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, - ) -> Dict[str, Any]: - override_mode: Union[Literal["validation"], None] = ( - None if separate_input_output_schemas else "validation" - ) - # This expects that GenerateJsonSchema was already used to generate the definitions - json_schema = field_mapping[(field, override_mode or field.mode)] - if "$ref" not in json_schema: - # TODO remove when deprecating Pydantic v1 - # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207 - json_schema["title"] = ( - field.field_info.title or field.alias.title().replace("_", " ") - ) - return json_schema - - def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: - return {} - - def get_definitions( - *, - fields: List[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - separate_input_output_schemas: bool = True, - ) -> Tuple[ - Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - Dict[str, Dict[str, Any]], - ]: - override_mode: Union[Literal["validation"], None] = ( - None if separate_input_output_schemas else "validation" - ) - inputs = [ - (field, override_mode or field.mode, field._type_adapter.core_schema) - for field in fields - ] - field_mapping, definitions = schema_generator.generate_definitions( - inputs=inputs - ) - for item_def in cast(Dict[str, Dict[str, Any]], definitions).values(): - if "description" in item_def: - item_description = cast(str, item_def["description"]).split("\f")[0] - item_def["description"] = item_description - return field_mapping, definitions # type: ignore[return-value] - - def 
is_scalar_field(field: ModelField) -> bool: - from fastapi import params - - return field_annotation_is_scalar( - field.field_info.annotation - ) and not isinstance(field.field_info, params.Body) - - def is_sequence_field(field: ModelField) -> bool: - return field_annotation_is_sequence(field.field_info.annotation) - - def is_scalar_sequence_field(field: ModelField) -> bool: - return field_annotation_is_scalar_sequence(field.field_info.annotation) - - def is_bytes_field(field: ModelField) -> bool: - return is_bytes_or_nonable_bytes_annotation(field.type_) - - def is_bytes_sequence_field(field: ModelField) -> bool: - return is_bytes_sequence_annotation(field.type_) - - def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: - cls = type(field_info) - merged_field_info = cls.from_annotation(annotation) - new_field_info = copy(field_info) - new_field_info.metadata = merged_field_info.metadata - new_field_info.annotation = merged_field_info.annotation - return new_field_info - - def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: - origin_type = ( - get_origin(field.field_info.annotation) or field.field_info.annotation - ) - assert issubclass(origin_type, sequence_types) # type: ignore[arg-type] - return sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return] - - def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: - error = ValidationError.from_exception_data( - "Field required", [{"type": "missing", "loc": loc, "input": {}}] - ).errors(include_url=False)[0] - error["input"] = None - return error # type: ignore[return-value] - - def create_body_model( - *, fields: Sequence[ModelField], model_name: str - ) -> Type[BaseModel]: - field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields} - BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload] - return BodyModel - - def get_model_fields(model: Type[BaseModel]) -> List[ModelField]: - return [ - ModelField(field_info=field_info, name=name) - for name, field_info in model.model_fields.items() - ] - -else: - from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX - from pydantic import AnyUrl as Url # noqa: F401 - from pydantic import ( # type: ignore[assignment] - BaseConfig as BaseConfig, # noqa: F401 - ) - from pydantic import ValidationError as ValidationError # noqa: F401 - from pydantic.class_validators import ( # type: ignore[no-redef] - Validator as Validator, # noqa: F401 - ) - from pydantic.error_wrappers import ( # type: ignore[no-redef] - ErrorWrapper as ErrorWrapper, # noqa: F401 - ) - from pydantic.errors import MissingError - from pydantic.fields import ( # type: ignore[attr-defined] - SHAPE_FROZENSET, - SHAPE_LIST, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_SINGLETON, - SHAPE_TUPLE, - SHAPE_TUPLE_ELLIPSIS, - ) - from pydantic.fields import FieldInfo as FieldInfo - from pydantic.fields import ( # type: ignore[no-redef,attr-defined] - ModelField as ModelField, # noqa: F401 - ) - - # Keeping old "Required" functionality from Pydantic V1, without - # shadowing typing.Required. 
- RequiredParam: Any = Ellipsis # type: ignore[no-redef] - from pydantic.fields import ( # type: ignore[no-redef,attr-defined] - Undefined as Undefined, - ) - from pydantic.fields import ( # type: ignore[no-redef, attr-defined] - UndefinedType as UndefinedType, # noqa: F401 - ) - from pydantic.schema import ( - field_schema, - get_flat_models_from_fields, - get_model_name_map, - model_process_schema, - ) - from pydantic.schema import ( # type: ignore[no-redef] # noqa: F401 - get_annotation_from_field_info as get_annotation_from_field_info, - ) - from pydantic.typing import ( # type: ignore[no-redef] - evaluate_forwardref as evaluate_forwardref, # noqa: F401 - ) - from pydantic.utils import ( # type: ignore[no-redef] - lenient_issubclass as lenient_issubclass, # noqa: F401 - ) - - GetJsonSchemaHandler = Any # type: ignore[assignment,misc] - JsonSchemaValue = Dict[str, Any] # type: ignore[misc] - CoreSchema = Any # type: ignore[assignment,misc] - - sequence_shapes = { - SHAPE_LIST, - SHAPE_SET, - SHAPE_FROZENSET, - SHAPE_TUPLE, - SHAPE_SEQUENCE, - SHAPE_TUPLE_ELLIPSIS, - } - sequence_shape_to_type = { - SHAPE_LIST: list, - SHAPE_SET: set, - SHAPE_TUPLE: tuple, - SHAPE_SEQUENCE: list, - SHAPE_TUPLE_ELLIPSIS: list, - } - - @dataclass - class GenerateJsonSchema: # type: ignore[no-redef] - ref_template: str - - class PydanticSchemaGenerationError(Exception): # type: ignore[no-redef] - pass - - def with_info_plain_validator_function( # type: ignore[misc] - function: Callable[..., Any], - *, - ref: Union[str, None] = None, - metadata: Any = None, - serialization: Any = None, - ) -> Any: - return {} - - def get_model_definitions( - *, - flat_models: Set[Union[Type[BaseModel], Type[Enum]]], - model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], - ) -> Dict[str, Any]: - definitions: Dict[str, Dict[str, Any]] = {} - for model in flat_models: - m_schema, m_definitions, m_nested_models = model_process_schema( - model, model_name_map=model_name_map, ref_prefix=REF_PREFIX - ) - definitions.update(m_definitions) - model_name = model_name_map[model] - if "description" in m_schema: - m_schema["description"] = m_schema["description"].split("\f")[0] - definitions[model_name] = m_schema - return definitions - - def is_pv1_scalar_field(field: ModelField) -> bool: - from fastapi import params - - field_info = field.field_info - if not ( - field.shape == SHAPE_SINGLETON # type: ignore[attr-defined] - and not lenient_issubclass(field.type_, BaseModel) - and not lenient_issubclass(field.type_, dict) - and not field_annotation_is_sequence(field.type_) - and not is_dataclass(field.type_) - and not isinstance(field_info, params.Body) - ): - return False - if field.sub_fields: # type: ignore[attr-defined] - if not all( - is_pv1_scalar_field(f) - for f in field.sub_fields # type: ignore[attr-defined] - ): - return False - return True - - def is_pv1_scalar_sequence_field(field: ModelField) -> bool: - if (field.shape in sequence_shapes) and not lenient_issubclass( # type: ignore[attr-defined] - field.type_, BaseModel - ): - if field.sub_fields is not None: # type: ignore[attr-defined] - for sub_field in field.sub_fields: # type: ignore[attr-defined] - if not is_pv1_scalar_field(sub_field): - return False - return True - if _annotation_is_sequence(field.type_): - return True - return False - - def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: - use_errors: List[Any] = [] - for error in errors: - if isinstance(error, ErrorWrapper): - new_errors = ValidationError( # type: ignore[call-arg] - 
errors=[error], model=RequestErrorModel - ).errors() - use_errors.extend(new_errors) - elif isinstance(error, list): - use_errors.extend(_normalize_errors(error)) - else: - use_errors.append(error) - return use_errors - - def _model_rebuild(model: Type[BaseModel]) -> None: - model.update_forward_refs() - - def _model_dump( - model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any - ) -> Any: - return model.dict(**kwargs) - - def _get_model_config(model: BaseModel) -> Any: - return model.__config__ # type: ignore[attr-defined] - - def get_schema_from_model_field( - *, - field: ModelField, - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, - ) -> Dict[str, Any]: - # This expects that GenerateJsonSchema was already used to generate the definitions - return field_schema( # type: ignore[no-any-return] - field, model_name_map=model_name_map, ref_prefix=REF_PREFIX - )[0] - - def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: - models = get_flat_models_from_fields(fields, known_models=set()) - return get_model_name_map(models) # type: ignore[no-any-return] - - def get_definitions( - *, - fields: List[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - separate_input_output_schemas: bool = True, - ) -> Tuple[ - Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - Dict[str, Dict[str, Any]], - ]: - models = get_flat_models_from_fields(fields, known_models=set()) - return {}, get_model_definitions( - flat_models=models, model_name_map=model_name_map - ) - - def is_scalar_field(field: ModelField) -> bool: - return is_pv1_scalar_field(field) - - def is_sequence_field(field: ModelField) -> bool: - return field.shape in sequence_shapes or _annotation_is_sequence(field.type_) # type: ignore[attr-defined] - - def is_scalar_sequence_field(field: ModelField) -> bool: - return is_pv1_scalar_sequence_field(field) - - def is_bytes_field(field: ModelField) -> bool: - return lenient_issubclass(field.type_, bytes) - - def is_bytes_sequence_field(field: ModelField) -> bool: - return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes) # type: ignore[attr-defined] - - def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: - return copy(field_info) - - def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: - return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return,attr-defined] - - def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: - missing_field_error = ErrorWrapper(MissingError(), loc=loc) # type: ignore[call-arg] - new_error = ValidationError([missing_field_error], RequestErrorModel) - return new_error.errors()[0] # type: ignore[return-value] - - def create_body_model( - *, fields: Sequence[ModelField], model_name: str - ) -> Type[BaseModel]: - BodyModel = create_model(model_name) - for f in fields: - BodyModel.__fields__[f.name] = f # type: ignore[index] - return BodyModel - - def get_model_fields(model: Type[BaseModel]) -> List[ModelField]: - return list(model.__fields__.values()) # type: ignore[attr-defined] - - -def _regenerate_error_with_loc( - *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...] 
-) -> List[Dict[str, Any]]: - updated_loc_errors: List[Any] = [ - {**err, "loc": loc_prefix + err.get("loc", ())} - for err in _normalize_errors(errors) - ] - - return updated_loc_errors - - -def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: - if lenient_issubclass(annotation, (str, bytes)): - return False - return lenient_issubclass(annotation, sequence_types) - - -def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - for arg in get_args(annotation): - if field_annotation_is_sequence(arg): - return True - return False - return _annotation_is_sequence(annotation) or _annotation_is_sequence( - get_origin(annotation) - ) - - -def value_is_sequence(value: Any) -> bool: - return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type] - - -def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: - return ( - lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile)) - or _annotation_is_sequence(annotation) - or is_dataclass(annotation) - ) - - -def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - return any(field_annotation_is_complex(arg) for arg in get_args(annotation)) - - return ( - _annotation_is_complex(annotation) - or _annotation_is_complex(origin) - or hasattr(origin, "__pydantic_core_schema__") - or hasattr(origin, "__get_pydantic_core_schema__") - ) - - -def field_annotation_is_scalar(annotation: Any) -> bool: - # handle Ellipsis here to make tuple[int, ...] work nicely - return annotation is Ellipsis or not field_annotation_is_complex(annotation) - - -def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one_scalar_sequence = False - for arg in get_args(annotation): - if field_annotation_is_scalar_sequence(arg): - at_least_one_scalar_sequence = True - continue - elif not field_annotation_is_scalar(arg): - return False - return at_least_one_scalar_sequence - return field_annotation_is_sequence(annotation) and all( - field_annotation_is_scalar(sub_annotation) - for sub_annotation in get_args(annotation) - ) - - -def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool: - if lenient_issubclass(annotation, bytes): - return True - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - for arg in get_args(annotation): - if lenient_issubclass(arg, bytes): - return True - return False - - -def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool: - if lenient_issubclass(annotation, UploadFile): - return True - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - for arg in get_args(annotation): - if lenient_issubclass(arg, UploadFile): - return True - return False - - -def is_bytes_sequence_annotation(annotation: Any) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one = False - for arg in get_args(annotation): - if is_bytes_sequence_annotation(arg): - at_least_one = True - continue - return at_least_one - return field_annotation_is_sequence(annotation) and all( - is_bytes_or_nonable_bytes_annotation(sub_annotation) - for sub_annotation in get_args(annotation) - ) - - -def is_uploadfile_sequence_annotation(annotation: Any) -> bool: - origin = 
get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one = False - for arg in get_args(annotation): - if is_uploadfile_sequence_annotation(arg): - at_least_one = True - continue - return at_least_one - return field_annotation_is_sequence(annotation) and all( - is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation) - for sub_annotation in get_args(annotation) - ) - - -@lru_cache -def get_cached_model_fields(model: Type[BaseModel]) -> List[ModelField]: - return get_model_fields(model) diff --git a/venv/lib/python3.12/site-packages/fastapi/applications.py b/venv/lib/python3.12/site-packages/fastapi/applications.py deleted file mode 100644 index 05c7bd2b..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/applications.py +++ /dev/null @@ -1,4588 +0,0 @@ -from enum import Enum -from typing import ( - Any, - Awaitable, - Callable, - Coroutine, - Dict, - List, - Optional, - Sequence, - Type, - TypeVar, - Union, -) - -from fastapi import routing -from fastapi.datastructures import Default, DefaultPlaceholder -from fastapi.exception_handlers import ( - http_exception_handler, - request_validation_exception_handler, - websocket_request_validation_exception_handler, -) -from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError -from fastapi.logger import logger -from fastapi.openapi.docs import ( - get_redoc_html, - get_swagger_ui_html, - get_swagger_ui_oauth2_redirect_html, -) -from fastapi.openapi.utils import get_openapi -from fastapi.params import Depends -from fastapi.types import DecoratedCallable, IncEx -from fastapi.utils import generate_unique_id -from starlette.applications import Starlette -from starlette.datastructures import State -from starlette.exceptions import HTTPException -from starlette.middleware import Middleware -from starlette.middleware.base import BaseHTTPMiddleware -from starlette.requests import Request -from starlette.responses import HTMLResponse, JSONResponse, Response -from starlette.routing import BaseRoute -from starlette.types import ASGIApp, Lifespan, Receive, Scope, Send -from typing_extensions import Annotated, Doc, deprecated - -AppType = TypeVar("AppType", bound="FastAPI") - - -class FastAPI(Starlette): - """ - `FastAPI` app class, the main entrypoint to use FastAPI. - - Read more in the - [FastAPI docs for First Steps](https://fastapi.tiangolo.com/tutorial/first-steps/). - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - ``` - """ - - def __init__( - self: AppType, - *, - debug: Annotated[ - bool, - Doc( - """ - Boolean indicating if debug tracebacks should be returned on server - errors. - - Read more in the - [Starlette docs for Applications](https://www.starlette.io/applications/#instantiating-the-application). - """ - ), - ] = False, - routes: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - **Note**: you probably shouldn't use this parameter, it is inherited - from Starlette and supported for compatibility. - - --- - - A list of routes to serve incoming HTTP and WebSocket requests. - """ - ), - deprecated( - """ - You normally wouldn't use this parameter with FastAPI, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation methods*, - like `app.get()`, `app.post()`, etc. - """ - ), - ] = None, - title: Annotated[ - str, - Doc( - """ - The title of the API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
- - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(title="ChimichangApp") - ``` - """ - ), - ] = "FastAPI", - summary: Annotated[ - Optional[str], - Doc( - """ - A short summary of the API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(summary="Deadpond's favorite app. Nuff said.") - ``` - """ - ), - ] = None, - description: Annotated[ - str, - Doc( - ''' - A description of the API. Supports Markdown (using - [CommonMark syntax](https://commonmark.org/)). - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI( - description=""" - ChimichangApp API helps you do awesome stuff. 🚀 - - ## Items - - You can **read items**. - - ## Users - - You will be able to: - - * **Create users** (_not implemented_). - * **Read users** (_not implemented_). - - """ - ) - ``` - ''' - ), - ] = "", - version: Annotated[ - str, - Doc( - """ - The version of the API. - - **Note** This is the version of your application, not the version of - the OpenAPI specification nor the version of FastAPI being used. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(version="0.0.1") - ``` - """ - ), - ] = "0.1.0", - openapi_url: Annotated[ - Optional[str], - Doc( - """ - The URL where the OpenAPI schema will be served from. - - If you set it to `None`, no OpenAPI schema will be served publicly, and - the default automatic endpoints `/docs` and `/redoc` will also be - disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#openapi-url). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(openapi_url="/api/v1/openapi.json") - ``` - """ - ), - ] = "/openapi.json", - openapi_tags: Annotated[ - Optional[List[Dict[str, Any]]], - Doc( - """ - A list of tags used by OpenAPI, these are the same `tags` you can set - in the *path operations*, like: - - * `@app.get("/users/", tags=["users"])` - * `@app.get("/items/", tags=["items"])` - - The order of the tags can be used to specify the order shown in - tools like Swagger UI, used in the automatic path `/docs`. - - It's not required to specify all the tags used. - - The tags that are not declared MAY be organized randomly or based - on the tools' logic. Each tag name in the list MUST be unique. - - The value of each item is a `dict` containing: - - * `name`: The name of the tag. - * `description`: A short description of the tag. - [CommonMark syntax](https://commonmark.org/) MAY be used for rich - text representation. - * `externalDocs`: Additional external documentation for this tag. If - provided, it would contain a `dict` with: - * `description`: A short description of the target documentation. 
- [CommonMark syntax](https://commonmark.org/) MAY be used for - rich text representation. - * `url`: The URL for the target documentation. Value MUST be in - the form of a URL. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-tags). - - **Example** - - ```python - from fastapi import FastAPI - - tags_metadata = [ - { - "name": "users", - "description": "Operations with users. The **login** logic is also here.", - }, - { - "name": "items", - "description": "Manage items. So _fancy_ they have their own docs.", - "externalDocs": { - "description": "Items external docs", - "url": "https://fastapi.tiangolo.com/", - }, - }, - ] - - app = FastAPI(openapi_tags=tags_metadata) - ``` - """ - ), - ] = None, - servers: Annotated[ - Optional[List[Dict[str, Union[str, Any]]]], - Doc( - """ - A `list` of `dict`s with connectivity information to a target server. - - You would use it, for example, if your application is served from - different domains and you want to use the same Swagger UI in the - browser to interact with each of them (instead of having multiple - browser tabs open). Or if you want to leave fixed the possible URLs. - - If the servers `list` is not provided, or is an empty `list`, the - default value would be a `dict` with a `url` value of `/`. - - Each item in the `list` is a `dict` containing: - - * `url`: A URL to the target host. This URL supports Server Variables - and MAY be relative, to indicate that the host location is relative - to the location where the OpenAPI document is being served. Variable - substitutions will be made when a variable is named in `{`brackets`}`. - * `description`: An optional string describing the host designated by - the URL. [CommonMark syntax](https://commonmark.org/) MAY be used for - rich text representation. - * `variables`: A `dict` between a variable name and its value. The value - is used for substitution in the server's URL template. - - Read more in the - [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#additional-servers). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI( - servers=[ - {"url": "https://stag.example.com", "description": "Staging environment"}, - {"url": "https://prod.example.com", "description": "Production environment"}, - ] - ) - ``` - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of global dependencies, they will be applied to each - *path operation*, including in sub-routers. - - Read more about it in the - [FastAPI docs for Global Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/global-dependencies/). - - **Example** - - ```python - from fastapi import Depends, FastAPI - - from .dependencies import func_dep_1, func_dep_2 - - app = FastAPI(dependencies=[Depends(func_dep_1), Depends(func_dep_2)]) - ``` - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). 
- - **Example** - - ```python - from fastapi import FastAPI - from fastapi.responses import ORJSONResponse - - app = FastAPI(default_response_class=ORJSONResponse) - ``` - """ - ), - ] = Default(JSONResponse), - redirect_slashes: Annotated[ - bool, - Doc( - """ - Whether to detect and redirect slashes in URLs when the client doesn't - use the same format. - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(redirect_slashes=True) # the default - - @app.get("/items/") - async def read_items(): - return [{"item_id": "Foo"}] - ``` - - With this app, if a client goes to `/items` (without a trailing slash), - they will be automatically redirected with an HTTP status code of 307 - to `/items/`. - """ - ), - ] = True, - docs_url: Annotated[ - Optional[str], - Doc( - """ - The path to the automatic interactive API documentation. - It is handled in the browser by Swagger UI. - - The default URL is `/docs`. You can disable it by setting it to `None`. - - If `openapi_url` is set to `None`, this will be automatically disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(docs_url="/documentation", redoc_url=None) - ``` - """ - ), - ] = "/docs", - redoc_url: Annotated[ - Optional[str], - Doc( - """ - The path to the alternative automatic interactive API documentation - provided by ReDoc. - - The default URL is `/redoc`. You can disable it by setting it to `None`. - - If `openapi_url` is set to `None`, this will be automatically disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(docs_url="/documentation", redoc_url="redocumentation") - ``` - """ - ), - ] = "/redoc", - swagger_ui_oauth2_redirect_url: Annotated[ - Optional[str], - Doc( - """ - The OAuth2 redirect endpoint for the Swagger UI. - - By default it is `/docs/oauth2-redirect`. - - This is only used if you use OAuth2 (with the "Authorize" button) - with Swagger UI. - """ - ), - ] = "/docs/oauth2-redirect", - swagger_ui_init_oauth: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - OAuth2 configuration for the Swagger UI, by default shown at `/docs`. - - Read more about the available configuration options in the - [Swagger UI docs](https://swagger.io/docs/open-source-tools/swagger-ui/usage/oauth2/). - """ - ), - ] = None, - middleware: Annotated[ - Optional[Sequence[Middleware]], - Doc( - """ - List of middleware to be added when creating the application. - - In FastAPI you would normally do this with `app.add_middleware()` - instead. - - Read more in the - [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). - """ - ), - ] = None, - exception_handlers: Annotated[ - Optional[ - Dict[ - Union[int, Type[Exception]], - Callable[[Request, Any], Coroutine[Any, Any, Response]], - ] - ], - Doc( - """ - A dictionary with handlers for exceptions. - - In FastAPI, you would normally use the decorator - `@app.exception_handler()`. - - Read more in the - [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). - """ - ), - ] = None, - on_startup: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of startup event handler functions. - - You should instead use the `lifespan` handlers. 
- - Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - on_shutdown: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of shutdown event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - lifespan: Annotated[ - Optional[Lifespan[AppType]], - Doc( - """ - A `Lifespan` context manager handler. This replaces `startup` and - `shutdown` functions with a single context manager. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - terms_of_service: Annotated[ - Optional[str], - Doc( - """ - A URL to the Terms of Service for your API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI(terms_of_service="http://example.com/terms/") - ``` - """ - ), - ] = None, - contact: Annotated[ - Optional[Dict[str, Union[str, Any]]], - Doc( - """ - A dictionary with the contact information for the exposed API. - - It can contain several fields. - - * `name`: (`str`) The name of the contact person/organization. - * `url`: (`str`) A URL pointing to the contact information. MUST be in - the format of a URL. - * `email`: (`str`) The email address of the contact person/organization. - MUST be in the format of an email address. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI( - contact={ - "name": "Deadpoolio the Amazing", - "url": "http://x-force.example.com/contact/", - "email": "dp@x-force.example.com", - } - ) - ``` - """ - ), - ] = None, - license_info: Annotated[ - Optional[Dict[str, Union[str, Any]]], - Doc( - """ - A dictionary with the license information for the exposed API. - - It can contain several fields. - - * `name`: (`str`) **REQUIRED** (if a `license_info` is set). The - license name used for the API. - * `identifier`: (`str`) An [SPDX](https://spdx.dev/) license expression - for the API. The `identifier` field is mutually exclusive of the `url` - field. Available since OpenAPI 3.1.0, FastAPI 0.99.0. - * `url`: (`str`) A URL to the license used for the API. This MUST be - the format of a URL. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI( - license_info={ - "name": "Apache 2.0", - "url": "https://www.apache.org/licenses/LICENSE-2.0.html", - } - ) - ``` - """ - ), - ] = None, - openapi_prefix: Annotated[ - str, - Doc( - """ - A URL prefix for the OpenAPI URL. - """ - ), - deprecated( - """ - "openapi_prefix" has been deprecated in favor of "root_path", which - follows more closely the ASGI standard, is simpler, and more - automatic. - """ - ), - ] = "", - root_path: Annotated[ - str, - Doc( - """ - A path prefix handled by a proxy that is not seen by the application - but is seen by external clients, which affects things like Swagger UI. 
-
-                Read more about it at the
-                [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/).
-
-                **Example**
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI(root_path="/api/v1")
-                ```
-                """
-            ),
-        ] = "",
-        root_path_in_servers: Annotated[
-            bool,
-            Doc(
-                """
-                Whether to automatically add the `root_path` to the URLs in the
-                `servers` field of the autogenerated OpenAPI. Set it to `False` to
-                disable it.
-
-                Read more about it in the
-                [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#disable-automatic-server-from-root_path).
-
-                **Example**
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI(root_path_in_servers=False)
-                ```
-                """
-            ),
-        ] = True,
-        responses: Annotated[
-            Optional[Dict[Union[int, str], Dict[str, Any]]],
-            Doc(
-                """
-                Additional responses to be shown in OpenAPI.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/).
-
-                And in the
-                [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
-                """
-            ),
-        ] = None,
-        callbacks: Annotated[
-            Optional[List[BaseRoute]],
-            Doc(
-                """
-                OpenAPI callbacks that should apply to all *path operations*.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
-                """
-            ),
-        ] = None,
-        webhooks: Annotated[
-            Optional[routing.APIRouter],
-            Doc(
-                """
-                Add OpenAPI webhooks. This is similar to `callbacks` but it doesn't
-                depend on specific *path operations*.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
-                **Note**: This is available since OpenAPI 3.1.0, FastAPI 0.99.0.
-
-                Read more about it in the
-                [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/).
-                """
-            ),
-        ] = None,
-        deprecated: Annotated[
-            Optional[bool],
-            Doc(
-                """
-                Mark all *path operations* as deprecated. You probably don't need it,
-                but it's available.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
-                """
-            ),
-        ] = None,
-        include_in_schema: Annotated[
-            bool,
-            Doc(
-                """
-                Whether to include (or not) all the *path operations* in the
-                generated OpenAPI. You probably don't need it, but it's available.
-
-                This affects the generated OpenAPI (e.g. visible at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi).
-                """
-            ),
-        ] = True,
-        swagger_ui_parameters: Annotated[
-            Optional[Dict[str, Any]],
-            Doc(
-                """
-                Parameters to configure Swagger UI, the autogenerated interactive API
-                documentation (by default at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs about how to Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/).
-                """
-            ),
-        ] = None,
-        generate_unique_id_function: Annotated[
-            Callable[[routing.APIRoute], str],
-            Doc(
-                """
-                Customize the function used to generate unique IDs for the *path
-                operations* shown in the generated OpenAPI.
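-
-                For illustration, a sketch of a custom ID function (the
-                `"{tag}-{route name}"` scheme is an arbitrary choice, not the
-                library default, and it assumes each route has at least one tag):
-
-                ```python
-                from fastapi import FastAPI
-                from fastapi.routing import APIRoute
-
-                def custom_generate_unique_id(route: APIRoute) -> str:
-                    # Build the operation ID from the first tag and the route name.
-                    return f"{route.tags[0]}-{route.name}"
-
-                app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
-                ```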
-
-                This is particularly useful when automatically generating clients or
-                SDKs for your API.
-
-                Read more about it in the
-                [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
-                """
-            ),
-        ] = Default(generate_unique_id),
-        separate_input_output_schemas: Annotated[
-            bool,
-            Doc(
-                """
-                Whether to generate separate OpenAPI schemas for request body and
-                response body when the results would be more precise.
-
-                This is particularly useful when automatically generating clients.
-
-                For example, if you have a model like:
-
-                ```python
-                from pydantic import BaseModel
-
-                class Item(BaseModel):
-                    name: str
-                    tags: list[str] = []
-                ```
-
-                When `Item` is used for input, as a request body, `tags` is not
-                required: the client doesn't have to provide it.
-
-                But when using `Item` for output, as a response body, `tags` is always
-                available because it has a default value, even if it's just an empty
-                list. So, the client can always expect it to be present.
-
-                In this case, there would be two different schemas, one for input and
-                another one for output.
-                """
-            ),
-        ] = True,
-        **extra: Annotated[
-            Any,
-            Doc(
-                """
-                Extra keyword arguments to be stored in the app, not used by FastAPI
-                anywhere.
-                """
-            ),
-        ],
-    ) -> None:
-        self.debug = debug
-        self.title = title
-        self.summary = summary
-        self.description = description
-        self.version = version
-        self.terms_of_service = terms_of_service
-        self.contact = contact
-        self.license_info = license_info
-        self.openapi_url = openapi_url
-        self.openapi_tags = openapi_tags
-        self.root_path_in_servers = root_path_in_servers
-        self.docs_url = docs_url
-        self.redoc_url = redoc_url
-        self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url
-        self.swagger_ui_init_oauth = swagger_ui_init_oauth
-        self.swagger_ui_parameters = swagger_ui_parameters
-        self.servers = servers or []
-        self.separate_input_output_schemas = separate_input_output_schemas
-        self.extra = extra
-        self.openapi_version: Annotated[
-            str,
-            Doc(
-                """
-                The version string of OpenAPI.
-
-                FastAPI will generate OpenAPI version 3.1.0, and will output that as
-                the OpenAPI version. But some tools, even though they might be
-                compatible with OpenAPI 3.1.0, might not recognize it as valid.
-
-                So, you could override this value to trick those tools into using
-                the generated OpenAPI. Keep in mind that this is a hack. But if you
-                avoid using features added in OpenAPI 3.1.0, it might work for your
-                use case.
-
-                This is not passed as a parameter to the `FastAPI` class to avoid
-                giving the false idea that FastAPI would generate a different OpenAPI
-                schema. It is only available as an attribute.
-
-                **Example**
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI()
-
-                app.openapi_version = "3.0.2"
-                ```
-                """
-            ),
-        ] = "3.1.0"
-        self.openapi_schema: Optional[Dict[str, Any]] = None
-        if self.openapi_url:
-            assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'"
-            assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'"
-        # TODO: remove when discarding the openapi_prefix parameter
-        if openapi_prefix:
-            logger.warning(
-                '"openapi_prefix" has been deprecated in favor of "root_path", which '
-                "follows more closely the ASGI standard, is simpler, and more "
-                "automatic. Check the docs at "
-                "https://fastapi.tiangolo.com/advanced/sub-applications/"
-            )
-        self.webhooks: Annotated[
-            routing.APIRouter,
-            Doc(
-                """
-                The `app.webhooks` attribute is an `APIRouter` with the *path
-                operations* that will be used just for documentation of webhooks.
-
-                Read more about it in the
-                [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/).
-                """
-            ),
-        ] = webhooks or routing.APIRouter()
-        self.root_path = root_path or openapi_prefix
-        self.state: Annotated[
-            State,
-            Doc(
-                """
-                A state object for the application. This is the same object for the
-                entire application, it doesn't change from request to request.
-
-                You normally wouldn't use this in FastAPI; in most cases you would
-                instead use FastAPI dependencies.
-
-                This is simply inherited from Starlette.
-
-                Read more about it in the
-                [Starlette docs for Applications](https://www.starlette.io/applications/#storing-state-on-the-app-instance).
-                """
-            ),
-        ] = State()
-        self.dependency_overrides: Annotated[
-            Dict[Callable[..., Any], Callable[..., Any]],
-            Doc(
-                """
-                A dictionary with overrides for the dependencies.
-
-                Each key is the original dependency callable, and the value is the
-                actual dependency that should be called.
-
-                This is for testing, to replace expensive dependencies with testing
-                versions.
-
-                Read more about it in the
-                [FastAPI docs for Testing Dependencies with Overrides](https://fastapi.tiangolo.com/advanced/testing-dependencies/).
-                """
-            ),
-        ] = {}
-        self.router: routing.APIRouter = routing.APIRouter(
-            routes=routes,
-            redirect_slashes=redirect_slashes,
-            dependency_overrides_provider=self,
-            on_startup=on_startup,
-            on_shutdown=on_shutdown,
-            lifespan=lifespan,
-            default_response_class=default_response_class,
-            dependencies=dependencies,
-            callbacks=callbacks,
-            deprecated=deprecated,
-            include_in_schema=include_in_schema,
-            responses=responses,
-            generate_unique_id_function=generate_unique_id_function,
-        )
-        self.exception_handlers: Dict[
-            Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]]
-        ] = {} if exception_handlers is None else dict(exception_handlers)
-        self.exception_handlers.setdefault(HTTPException, http_exception_handler)
-        self.exception_handlers.setdefault(
-            RequestValidationError, request_validation_exception_handler
-        )
-        self.exception_handlers.setdefault(
-            WebSocketRequestValidationError,
-            # Starlette still has incorrect type specification for the handlers
-            websocket_request_validation_exception_handler,  # type: ignore
-        )
-
-        self.user_middleware: List[Middleware] = (
-            [] if middleware is None else list(middleware)
-        )
-        self.middleware_stack: Union[ASGIApp, None] = None
-        self.setup()
-
-    def openapi(self) -> Dict[str, Any]:
-        """
-        Generate the OpenAPI schema of the application. This is called by FastAPI
-        internally.
-
-        The first time it is called, it stores the result in the attribute
-        `app.openapi_schema`; on subsequent calls it just returns that same result,
-        to avoid the cost of generating the schema every time.
-
-        If you need to modify the generated OpenAPI schema, you can modify it after
-        it is generated.
-
-        Read more in the
-        [FastAPI docs for OpenAPI](https://fastapi.tiangolo.com/how-to/extending-openapi/).
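-
-        For illustration, a minimal sketch of modifying the cached schema (the
-        `x-logo` extension key is just an illustrative addition):
-
-        ```python
-        from fastapi import FastAPI
-
-        app = FastAPI()
-
-        # Generate the schema once, tweak it in place, then re-cache it.
-        openapi_schema = app.openapi()
-        openapi_schema["info"]["x-logo"] = {"url": "https://example.com/logo.png"}
-        app.openapi_schema = openapi_schema
-        ```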
- """ - if not self.openapi_schema: - self.openapi_schema = get_openapi( - title=self.title, - version=self.version, - openapi_version=self.openapi_version, - summary=self.summary, - description=self.description, - terms_of_service=self.terms_of_service, - contact=self.contact, - license_info=self.license_info, - routes=self.routes, - webhooks=self.webhooks.routes, - tags=self.openapi_tags, - servers=self.servers, - separate_input_output_schemas=self.separate_input_output_schemas, - ) - return self.openapi_schema - - def setup(self) -> None: - if self.openapi_url: - urls = (server_data.get("url") for server_data in self.servers) - server_urls = {url for url in urls if url} - - async def openapi(req: Request) -> JSONResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - if root_path not in server_urls: - if root_path and self.root_path_in_servers: - self.servers.insert(0, {"url": root_path}) - server_urls.add(root_path) - return JSONResponse(self.openapi()) - - self.add_route(self.openapi_url, openapi, include_in_schema=False) - if self.openapi_url and self.docs_url: - - async def swagger_ui_html(req: Request) -> HTMLResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - openapi_url = root_path + self.openapi_url - oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url - if oauth2_redirect_url: - oauth2_redirect_url = root_path + oauth2_redirect_url - return get_swagger_ui_html( - openapi_url=openapi_url, - title=f"{self.title} - Swagger UI", - oauth2_redirect_url=oauth2_redirect_url, - init_oauth=self.swagger_ui_init_oauth, - swagger_ui_parameters=self.swagger_ui_parameters, - ) - - self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) - - if self.swagger_ui_oauth2_redirect_url: - - async def swagger_ui_redirect(req: Request) -> HTMLResponse: - return get_swagger_ui_oauth2_redirect_html() - - self.add_route( - self.swagger_ui_oauth2_redirect_url, - swagger_ui_redirect, - include_in_schema=False, - ) - if self.openapi_url and self.redoc_url: - - async def redoc_html(req: Request) -> HTMLResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - openapi_url = root_path + self.openapi_url - return get_redoc_html( - openapi_url=openapi_url, title=f"{self.title} - ReDoc" - ) - - self.add_route(self.redoc_url, redoc_html, include_in_schema=False) - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: - if self.root_path: - scope["root_path"] = self.root_path - await super().__call__(scope, receive, send) - - def add_api_route( - self, - path: str, - endpoint: Callable[..., Any], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - name: Optional[str] = None, - openapi_extra: 
Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( - generate_unique_id - ), - ) -> None: - self.router.add_api_route( - path, - endpoint=endpoint, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def api_route( - self, - path: str, - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Type[Response] = Default(JSONResponse), - name: Optional[str] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( - generate_unique_id - ), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.router.add_api_route( - path, - func, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - return func - - return decorator - - def add_api_websocket_route( - self, - path: str, - endpoint: Callable[..., Any], - name: Optional[str] = None, - *, - dependencies: Optional[Sequence[Depends]] = None, - ) -> None: - self.router.add_api_websocket_route( - path, - endpoint, - name=name, - dependencies=dependencies, - ) - - def websocket( - self, - path: Annotated[ - str, - Doc( - """ - WebSocket path. 
- """ - ), - ], - name: Annotated[ - Optional[str], - Doc( - """ - A name for the WebSocket. Only used internally. - """ - ), - ] = None, - *, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be used for this - WebSocket. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - """ - ), - ] = None, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Decorate a WebSocket function. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - - **Example** - - ```python - from fastapi import FastAPI, WebSocket - - app = FastAPI() - - @app.websocket("/ws") - async def websocket_endpoint(websocket: WebSocket): - await websocket.accept() - while True: - data = await websocket.receive_text() - await websocket.send_text(f"Message text was: {data}") - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_websocket_route( - path, - func, - name=name, - dependencies=dependencies, - ) - return func - - return decorator - - def include_router( - self, - router: Annotated[routing.APIRouter, Doc("The `APIRouter` to include.")], - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - - **Example** - - ```python - from fastapi import Depends, FastAPI - - from .dependencies import get_token_header - from .internal import admin - - app = FastAPI() - - app.include_router( - admin.router, - dependencies=[Depends(get_token_header)], - ) - ``` - """ - ), - ] = None, - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all the *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - **Example** - - ```python - from fastapi import FastAPI - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - deprecated=True, - ) - ``` - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include (or not) all the *path operations* in this router in the - generated OpenAPI schema. 
- - This affects the generated OpenAPI (e.g. visible at `/docs`). - - **Example** - - ```python - from fastapi import FastAPI - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - include_in_schema=False, - ) - ``` - """ - ), - ] = True, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - Default response class to be used for the *path operations* in this - router. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - - **Example** - - ```python - from fastapi import FastAPI - from fastapi.responses import ORJSONResponse - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - default_response_class=ORJSONResponse, - ) - ``` - """ - ), - ] = Default(JSONResponse), - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> None: - """ - Include an `APIRouter` in the same app. - - Read more about it in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). - - ## Example - - ```python - from fastapi import FastAPI - - from .users import users_router - - app = FastAPI() - - app.include_router(users_router) - ``` - """ - self.router.include_router( - router, - prefix=prefix, - tags=tags, - dependencies=dependencies, - responses=responses, - deprecated=deprecated, - include_in_schema=include_in_schema, - default_response_class=default_response_class, - callbacks=callbacks, - generate_unique_id_function=generate_unique_id_function, - ) - - def get( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. 
- * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. 
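-
-                For illustration, a sketch with a hand-picked operation ID (the
-                name `read_items_v1` is arbitrary; it must be unique API-wide):
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI()
-
-                # This operation will appear as "read_items_v1" in the OpenAPI schema.
-                @app.get("/items/", operation_id="read_items_v1")
-                def read_items():
-                    return []
-                ```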
- - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). 
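-
-                For illustration, a sketch that hides one endpoint from the
-                generated schema (the path and handler are placeholders):
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI()
-
-                # Served normally, but omitted from OpenAPI and the docs UI.
-                @app.get("/internal/health", include_in_schema=False)
-                def internal_health():
-                    return {"status": "ok"}
-                ```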
- - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP GET operation. - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.get("/items/") - def read_items(): - return [{"name": "Empanada"}, {"name": "Arepa"}] - ``` - """ - return self.router.get( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def put( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. 
So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. 
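-
-                For illustration, a sketch documenting an extra 404 response (the
-                path and description text are arbitrary):
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI()
-
-                # The 404 entry is merged into this operation's OpenAPI responses.
-                @app.put(
-                    "/items/{item_id}",
-                    responses={404: {"description": "Item not found"}},
-                )
-                def replace_item(item_id: str):
-                    return {"item_id": item_id}
-                ```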
- - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. 
- - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PUT operation. 
- - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.put("/items/{item_id}") - def replace_item(item_id: str, item: Item): - return {"message": "Item replaced", "id": item_id} - ``` - """ - return self.router.put( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def post( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
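-
-                For illustration, a sketch grouping the operation under a tag (the
-                tag name `items` is an arbitrary grouping label):
-
-                ```python
-                from fastapi import FastAPI
-
-                app = FastAPI()
-
-                # Shown under the "items" group in the docs UI.
-                @app.post("/items/", tags=["items"])
-                def create_item():
-                    return {"message": "Item created"}
-                ```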
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. 
- - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP POST operation. - - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.post("/items/") - def create_item(item: Item): - return {"message": "Item created"} - ``` - """ - return self.router.post( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def delete( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
- """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
- """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP DELETE operation. - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.delete("/items/{item_id}") - def delete_item(item_id: str): - return {"message": "Item deleted"} - ``` - """ - return self.router.delete( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def options( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. 
If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. 
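The automatically generated operation IDs mentioned here can be replaced wholesale via `generate_unique_id_function`; a sketch along the lines of the linked docs (the ID format is just an example):

```python
from fastapi import FastAPI
from fastapi.routing import APIRoute


def custom_generate_unique_id(route: APIRoute) -> str:
    # Produces IDs like "items-read_items", which generated SDK clients turn
    # into friendlier method names than the verbose defaults. This assumes
    # every route has at least one tag.
    return f"{route.tags[0]}-{route.name}"


app = FastAPI(generate_unique_id_function=custom_generate_unique_id)


@app.get("/items/", tags=["items"])
def read_items():
    return [{"name": "Portal Gun"}]
```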
- - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
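And for `response_model_exclude_none`, a quick sketch of the simpler behavior described above (again with an illustrative `Item` model):

```python
from typing import Optional

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str
    description: Optional[str] = None


@app.get("/items/{item_id}", response_model=Item, response_model_exclude_none=True)
def read_item(item_id: str):
    # `description` is None, so it is omitted from the JSON entirely
    # instead of being serialized as "description": null.
    return {"name": "Foo", "description": None}
```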
- """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP OPTIONS operation. - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.options("/items/") - def get_item_options(): - return {"additions": ["Aji", "Guacamole"]} - ``` - """ - return self.router.options( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def head( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. 
- - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
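The `responses` parameter documented in this block accepts extra status codes keyed to OpenAPI metadata; a minimal sketch of typical usage (the 404 payload is illustrative):

```python
from fastapi import FastAPI
from fastapi.responses import JSONResponse

app = FastAPI()


@app.get(
    "/items/{item_id}",
    responses={404: {"description": "Item not found"}},
)
def read_item(item_id: str):
    if item_id != "foo":
        # The 404 entry above only documents this response in OpenAPI;
        # actually returning it is still up to the handler.
        return JSONResponse(status_code=404, content={"message": "Item not found"})
    return {"item_id": "foo"}
```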
- """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). 
- """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP HEAD operation. 
- - ## Example - - ```python - from fastapi import FastAPI, Response - - app = FastAPI() - - @app.head("/items/", status_code=204) - def get_items_headers(response: Response): - response.headers["X-Cat-Dog"] = "Alone in the world" - ``` - """ - return self.router.head( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def patch( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. 
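Decorator-level dependencies like the ones described above run before the handler but contribute no argument; a sketch with an illustrative `verify_token` check:

```python
from fastapi import Depends, FastAPI, Header, HTTPException

app = FastAPI()


async def verify_token(x_token: str = Header()):
    if x_token != "fake-super-secret-token":
        raise HTTPException(status_code=400, detail="X-Token header invalid")


@app.get("/items/", dependencies=[Depends(verify_token)])
async def read_items():
    # verify_token has already run (and possibly raised) by this point,
    # but its return value is discarded rather than injected here.
    return [{"item": "Foo"}]
```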
- - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
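A short sketch of the `response_model_by_alias` switch described above (the alias is illustrative):

```python
from fastapi import FastAPI
from pydantic import BaseModel, Field

app = FastAPI()


class Item(BaseModel):
    item_name: str = Field(alias="itemName")


@app.get("/items/{item_id}", response_model=Item)
def read_item(item_id: str):
    # With the default response_model_by_alias=True the JSON key is
    # "itemName"; passing response_model_by_alias=False in the decorator
    # would serialize it as "item_name" instead.
    return Item(itemName="Foo")
```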
- """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. 
- - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PATCH operation. - - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.patch("/items/") - def update_item(item: Item): - return {"message": "Item updated in place"} - ``` - """ - return self.router.patch( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def trace( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). 
- - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. 
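The field-set form of `response_model_include` mentioned above looks like this in practice (illustrative `Item` model and path):

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str
    description: str = ""
    tax: float = 10.5


@app.get(
    "/items/{item_id}/public",
    response_model=Item,
    response_model_include={"name", "description"},
)
def read_item_public(item_id: str):
    # Only `name` and `description` survive the filter; `tax` is dropped
    # even though the full Item is returned. `response_model_exclude` is
    # the mirror image, naming the fields to remove instead.
    return Item(name="Foo", description="A Foo item")
```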
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. 
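A sketch of swapping `response_class` as described above, using the `PlainTextResponse` shipped with FastAPI:

```python
from fastapi import FastAPI
from fastapi.responses import PlainTextResponse

app = FastAPI()


@app.get("/ping", response_class=PlainTextResponse)
def ping():
    # Serialized by PlainTextResponse instead of the default JSONResponse,
    # and the OpenAPI schema advertises text/plain for this operation.
    return "pong"
```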
- - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP TRACE operation. - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.trace("/items/{item_id}") - def trace_item(item_id: str): - return None - ``` - """ - return self.router.trace( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def websocket_route( - self, path: str, name: Union[str, None] = None - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.router.add_websocket_route(path, func, name=name) - return func - - return decorator - - @deprecated( - """ - on_event is deprecated, use lifespan event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). - """ - ) - def on_event( - self, - event_type: Annotated[ - str, - Doc( - """ - The type of event. `startup` or `shutdown`. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add an event handler for the application. 
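Since `on_event` is deprecated here in favor of lifespan handlers, a sketch of the replacement pattern from the linked docs:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Everything before `yield` runs at startup, everything after at
    # shutdown, replacing the @app.on_event("startup"/"shutdown") pair.
    print("starting up")
    yield
    print("shutting down")


app = FastAPI(lifespan=lifespan)
```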
- - `on_event` is deprecated, use `lifespan` event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). - """ - return self.router.on_event(event_type) - - def middleware( - self, - middleware_type: Annotated[ - str, - Doc( - """ - The type of middleware. Currently only supports `http`. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a middleware to the application. - - Read more about it in the - [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). - - ## Example - - ```python - import time - from typing import Awaitable, Callable - - from fastapi import FastAPI, Request, Response - - app = FastAPI() - - - @app.middleware("http") - async def add_process_time_header( - request: Request, call_next: Callable[[Request], Awaitable[Response]] - ) -> Response: - start_time = time.time() - response = await call_next(request) - process_time = time.time() - start_time - response.headers["X-Process-Time"] = str(process_time) - return response - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_middleware(BaseHTTPMiddleware, dispatch=func) - return func - - return decorator - - def exception_handler( - self, - exc_class_or_status_code: Annotated[ - Union[int, Type[Exception]], - Doc( - """ - The Exception class this would handle, or a status code. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add an exception handler to the app. - - Read more about it in the - [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). - - ## Example - - ```python - from fastapi import FastAPI, Request - from fastapi.responses import JSONResponse - - - class UnicornException(Exception): - def __init__(self, name: str): - self.name = name - - - app = FastAPI() - - - @app.exception_handler(UnicornException) - async def unicorn_exception_handler(request: Request, exc: UnicornException): - return JSONResponse( - status_code=418, - content={"message": f"Oops! {exc.name} did something. There goes a rainbow..."}, - ) - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_exception_handler(exc_class_or_status_code, func) - return func - - return decorator diff --git a/venv/lib/python3.12/site-packages/fastapi/background.py b/venv/lib/python3.12/site-packages/fastapi/background.py deleted file mode 100644 index 203578a4..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/background.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Any, Callable - -from starlette.background import BackgroundTasks as StarletteBackgroundTasks -from typing_extensions import Annotated, Doc, ParamSpec - -P = ParamSpec("P") - - -class BackgroundTasks(StarletteBackgroundTasks): - """ - A collection of background tasks that will be called after a response has been - sent to the client. - - Read more about it in the - [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). 
diff --git a/venv/lib/python3.12/site-packages/fastapi/background.py b/venv/lib/python3.12/site-packages/fastapi/background.py
deleted file mode 100644
index 203578a4..00000000
--- a/venv/lib/python3.12/site-packages/fastapi/background.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from typing import Any, Callable
-
-from starlette.background import BackgroundTasks as StarletteBackgroundTasks
-from typing_extensions import Annotated, Doc, ParamSpec
-
-P = ParamSpec("P")
-
-
-class BackgroundTasks(StarletteBackgroundTasks):
-    """
-    A collection of background tasks that will be called after a response has been
-    sent to the client.
-
-    Read more about it in the
-    [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/).
-
-    ## Example
-
-    ```python
-    from fastapi import BackgroundTasks, FastAPI
-
-    app = FastAPI()
-
-
-    def write_notification(email: str, message=""):
-        with open("log.txt", mode="w") as email_file:
-            content = f"notification for {email}: {message}"
-            email_file.write(content)
-
-
-    @app.post("/send-notification/{email}")
-    async def send_notification(email: str, background_tasks: BackgroundTasks):
-        background_tasks.add_task(write_notification, email, message="some notification")
-        return {"message": "Notification sent in the background"}
-    ```
-    """
-
-    def add_task(
-        self,
-        func: Annotated[
-            Callable[P, Any],
-            Doc(
-                """
-                The function to call after the response is sent.
-
-                It can be a regular `def` function or an `async def` function.
-                """
-            ),
-        ],
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> None:
-        """
-        Add a function to be called in the background after the response is sent.
-
-        Read more about it in the
-        [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/).
-        """
-        return super().add_task(func, *args, **kwargs)
diff --git a/venv/lib/python3.12/site-packages/fastapi/cli.py b/venv/lib/python3.12/site-packages/fastapi/cli.py
deleted file mode 100644
index 8d3301e9..00000000
--- a/venv/lib/python3.12/site-packages/fastapi/cli.py
+++ /dev/null
@@ -1,13 +0,0 @@
-try:
-    from fastapi_cli.cli import main as cli_main
-
-except ImportError:  # pragma: no cover
-    cli_main = None  # type: ignore
-
-
-def main() -> None:
-    if not cli_main:  # type: ignore[truthy-function]
-        message = 'To use the fastapi command, please install "fastapi[standard]":\n\n\tpip install "fastapi[standard]"\n'
-        print(message)
-        raise RuntimeError(message)  # noqa: B904
-    cli_main()
diff --git a/venv/lib/python3.12/site-packages/fastapi/concurrency.py b/venv/lib/python3.12/site-packages/fastapi/concurrency.py
deleted file mode 100644
index 3202c707..00000000
--- a/venv/lib/python3.12/site-packages/fastapi/concurrency.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from contextlib import asynccontextmanager as asynccontextmanager
-from typing import AsyncGenerator, ContextManager, TypeVar
-
-import anyio.to_thread
-from anyio import CapacityLimiter
-from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool  # noqa
-from starlette.concurrency import run_in_threadpool as run_in_threadpool  # noqa
-from starlette.concurrency import (  # noqa
-    run_until_first_complete as run_until_first_complete,
-)
-
-_T = TypeVar("_T")
-
-
-@asynccontextmanager
-async def contextmanager_in_threadpool(
-    cm: ContextManager[_T],
-) -> AsyncGenerator[_T, None]:
-    # blocking __exit__ from running waiting on a free thread
-    # can create race conditions/deadlocks if the context manager itself
-    # has its own internal pool (e.g.
a database connection pool) - # to avoid this we let __exit__ run without a capacity limit - # since we're creating a new limiter for each call, any non-zero limit - # works (1 is arbitrary) - exit_limiter = CapacityLimiter(1) - try: - yield await run_in_threadpool(cm.__enter__) - except Exception as e: - ok = bool( - await anyio.to_thread.run_sync( - cm.__exit__, type(e), e, e.__traceback__, limiter=exit_limiter - ) - ) - if not ok: - raise e - else: - await anyio.to_thread.run_sync( - cm.__exit__, None, None, None, limiter=exit_limiter - ) diff --git a/venv/lib/python3.12/site-packages/fastapi/datastructures.py b/venv/lib/python3.12/site-packages/fastapi/datastructures.py deleted file mode 100644 index cf8406b0..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/datastructures.py +++ /dev/null @@ -1,204 +0,0 @@ -from typing import ( - Any, - BinaryIO, - Callable, - Dict, - Iterable, - Optional, - Type, - TypeVar, - cast, -) - -from fastapi._compat import ( - PYDANTIC_V2, - CoreSchema, - GetJsonSchemaHandler, - JsonSchemaValue, - with_info_plain_validator_function, -) -from starlette.datastructures import URL as URL # noqa: F401 -from starlette.datastructures import Address as Address # noqa: F401 -from starlette.datastructures import FormData as FormData # noqa: F401 -from starlette.datastructures import Headers as Headers # noqa: F401 -from starlette.datastructures import QueryParams as QueryParams # noqa: F401 -from starlette.datastructures import State as State # noqa: F401 -from starlette.datastructures import UploadFile as StarletteUploadFile -from typing_extensions import Annotated, Doc - - -class UploadFile(StarletteUploadFile): - """ - A file uploaded in a request. - - Define it as a *path operation function* (or dependency) parameter. - - If you are using a regular `def` function, you can use the `upload_file.file` - attribute to access the raw standard Python file (blocking, not async), useful and - needed for non-async code. - - Read more about it in the - [FastAPI docs for Request Files](https://fastapi.tiangolo.com/tutorial/request-files/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import FastAPI, File, UploadFile - - app = FastAPI() - - - @app.post("/files/") - async def create_file(file: Annotated[bytes, File()]): - return {"file_size": len(file)} - - - @app.post("/uploadfile/") - async def create_upload_file(file: UploadFile): - return {"filename": file.filename} - ``` - """ - - file: Annotated[ - BinaryIO, - Doc("The standard Python file object (non-async)."), - ] - filename: Annotated[Optional[str], Doc("The original file name.")] - size: Annotated[Optional[int], Doc("The size of the file in bytes.")] - headers: Annotated[Headers, Doc("The headers of the request.")] - content_type: Annotated[ - Optional[str], Doc("The content type of the request, from the headers.") - ] - - async def write( - self, - data: Annotated[ - bytes, - Doc( - """ - The bytes to write to the file. - """ - ), - ], - ) -> None: - """ - Write some bytes to the file. - - You normally wouldn't use this from a file you read in a request. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().write(data) - - async def read( - self, - size: Annotated[ - int, - Doc( - """ - The number of bytes to read from the file. - """ - ), - ] = -1, - ) -> bytes: - """ - Read some bytes from the file. - - To be awaitable, compatible with async, this is run in threadpool. 
- """ - return await super().read(size) - - async def seek( - self, - offset: Annotated[ - int, - Doc( - """ - The position in bytes to seek to in the file. - """ - ), - ], - ) -> None: - """ - Move to a position in the file. - - Any next read or write will be done from that position. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().seek(offset) - - async def close(self) -> None: - """ - Close the file. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().close() - - @classmethod - def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]: - yield cls.validate - - @classmethod - def validate(cls: Type["UploadFile"], v: Any) -> Any: - if not isinstance(v, StarletteUploadFile): - raise ValueError(f"Expected UploadFile, received: {type(v)}") - return v - - @classmethod - def _validate(cls, __input_value: Any, _: Any) -> "UploadFile": - if not isinstance(__input_value, StarletteUploadFile): - raise ValueError(f"Expected UploadFile, received: {type(__input_value)}") - return cast(UploadFile, __input_value) - - if not PYDANTIC_V2: - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update({"type": "string", "format": "binary"}) - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - return {"type": "string", "format": "binary"} - - @classmethod - def __get_pydantic_core_schema__( - cls, source: Type[Any], handler: Callable[[Any], CoreSchema] - ) -> CoreSchema: - return with_info_plain_validator_function(cls._validate) - - -class DefaultPlaceholder: - """ - You shouldn't use this class directly. - - It's used internally to recognize when a default value has been overwritten, even - if the overridden default value was truthy. - """ - - def __init__(self, value: Any): - self.value = value - - def __bool__(self) -> bool: - return bool(self.value) - - def __eq__(self, o: object) -> bool: - return isinstance(o, DefaultPlaceholder) and o.value == self.value - - -DefaultType = TypeVar("DefaultType") - - -def Default(value: DefaultType) -> DefaultType: - """ - You shouldn't use this function directly. - - It's used internally to recognize when a default value has been overwritten, even - if the overridden default value was truthy. 
- """ - return DefaultPlaceholder(value) # type: ignore diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index d5a2df1f..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc deleted file mode 100644 index a848db40..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 3e49e7fe..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/dependencies/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py deleted file mode 100644 index 418c1172..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/dependencies/models.py +++ /dev/null @@ -1,37 +0,0 @@ -from dataclasses import dataclass, field -from typing import Any, Callable, List, Optional, Sequence, Tuple - -from fastapi._compat import ModelField -from fastapi.security.base import SecurityBase - - -@dataclass -class SecurityRequirement: - security_scheme: SecurityBase - scopes: Optional[Sequence[str]] = None - - -@dataclass -class Dependant: - path_params: List[ModelField] = field(default_factory=list) - query_params: List[ModelField] = field(default_factory=list) - header_params: List[ModelField] = field(default_factory=list) - cookie_params: List[ModelField] = field(default_factory=list) - body_params: List[ModelField] = field(default_factory=list) - dependencies: List["Dependant"] = field(default_factory=list) - security_requirements: List[SecurityRequirement] = field(default_factory=list) - name: Optional[str] = None - call: Optional[Callable[..., Any]] = None - request_param_name: Optional[str] = None - websocket_param_name: Optional[str] = None - http_connection_param_name: Optional[str] = None - response_param_name: Optional[str] = None - background_tasks_param_name: Optional[str] = None - security_scopes_param_name: Optional[str] = None - security_scopes: Optional[List[str]] = None - use_cache: bool = True - path: Optional[str] = None - cache_key: Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] = field(init=False) - - def __post_init__(self) -> None: - self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) diff --git a/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py b/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py deleted file mode 100644 index 081b63a8..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/dependencies/utils.py +++ /dev/null @@ -1,1001 +0,0 @@ -import inspect -from contextlib import AsyncExitStack, contextmanager -from copy import copy, deepcopy -from 
dataclasses import dataclass -from typing import ( - Any, - Callable, - Coroutine, - Dict, - ForwardRef, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -import anyio -from fastapi import params -from fastapi._compat import ( - PYDANTIC_V2, - ErrorWrapper, - ModelField, - RequiredParam, - Undefined, - _regenerate_error_with_loc, - copy_field_info, - create_body_model, - evaluate_forwardref, - field_annotation_is_scalar, - get_annotation_from_field_info, - get_cached_model_fields, - get_missing_field_error, - is_bytes_field, - is_bytes_sequence_field, - is_scalar_field, - is_scalar_sequence_field, - is_sequence_field, - is_uploadfile_or_nonable_uploadfile_annotation, - is_uploadfile_sequence_annotation, - lenient_issubclass, - sequence_types, - serialize_sequence_value, - value_is_sequence, -) -from fastapi.background import BackgroundTasks -from fastapi.concurrency import ( - asynccontextmanager, - contextmanager_in_threadpool, -) -from fastapi.dependencies.models import Dependant, SecurityRequirement -from fastapi.logger import logger -from fastapi.security.base import SecurityBase -from fastapi.security.oauth2 import OAuth2, SecurityScopes -from fastapi.security.open_id_connect_url import OpenIdConnect -from fastapi.utils import create_model_field, get_path_param_names -from pydantic import BaseModel -from pydantic.fields import FieldInfo -from starlette.background import BackgroundTasks as StarletteBackgroundTasks -from starlette.concurrency import run_in_threadpool -from starlette.datastructures import ( - FormData, - Headers, - ImmutableMultiDict, - QueryParams, - UploadFile, -) -from starlette.requests import HTTPConnection, Request -from starlette.responses import Response -from starlette.websockets import WebSocket -from typing_extensions import Annotated, get_args, get_origin - -multipart_not_installed_error = ( - 'Form data requires "python-multipart" to be installed. \n' - 'You can install "python-multipart" with: \n\n' - "pip install python-multipart\n" -) -multipart_incorrect_install_error = ( - 'Form data requires "python-multipart" to be installed. ' - 'It seems you installed "multipart" instead. 
\n' - 'You can remove "multipart" with: \n\n' - "pip uninstall multipart\n\n" - 'And then install "python-multipart" with: \n\n' - "pip install python-multipart\n" -) - - -def ensure_multipart_is_installed() -> None: - try: - from python_multipart import __version__ - - # Import an attribute that can be mocked/deleted in testing - assert __version__ > "0.0.12" - except (ImportError, AssertionError): - try: - # __version__ is available in both multiparts, and can be mocked - from multipart import __version__ # type: ignore[no-redef,import-untyped] - - assert __version__ - try: - # parse_options_header is only available in the right multipart - from multipart.multipart import ( # type: ignore[import-untyped] - parse_options_header, - ) - - assert parse_options_header - except ImportError: - logger.error(multipart_incorrect_install_error) - raise RuntimeError(multipart_incorrect_install_error) from None - except ImportError: - logger.error(multipart_not_installed_error) - raise RuntimeError(multipart_not_installed_error) from None - - -def get_param_sub_dependant( - *, - param_name: str, - depends: params.Depends, - path: str, - security_scopes: Optional[List[str]] = None, -) -> Dependant: - assert depends.dependency - return get_sub_dependant( - depends=depends, - dependency=depends.dependency, - path=path, - name=param_name, - security_scopes=security_scopes, - ) - - -def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: - assert callable(depends.dependency), ( - "A parameter-less dependency must have a callable dependency" - ) - return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) - - -def get_sub_dependant( - *, - depends: params.Depends, - dependency: Callable[..., Any], - path: str, - name: Optional[str] = None, - security_scopes: Optional[List[str]] = None, -) -> Dependant: - security_requirement = None - security_scopes = security_scopes or [] - if isinstance(depends, params.Security): - dependency_scopes = depends.scopes - security_scopes.extend(dependency_scopes) - if isinstance(dependency, SecurityBase): - use_scopes: List[str] = [] - if isinstance(dependency, (OAuth2, OpenIdConnect)): - use_scopes = security_scopes - security_requirement = SecurityRequirement( - security_scheme=dependency, scopes=use_scopes - ) - sub_dependant = get_dependant( - path=path, - call=dependency, - name=name, - security_scopes=security_scopes, - use_cache=depends.use_cache, - ) - if security_requirement: - sub_dependant.security_requirements.append(security_requirement) - return sub_dependant - - -CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] - - -def get_flat_dependant( - dependant: Dependant, - *, - skip_repeats: bool = False, - visited: Optional[List[CacheKey]] = None, -) -> Dependant: - if visited is None: - visited = [] - visited.append(dependant.cache_key) - - flat_dependant = Dependant( - path_params=dependant.path_params.copy(), - query_params=dependant.query_params.copy(), - header_params=dependant.header_params.copy(), - cookie_params=dependant.cookie_params.copy(), - body_params=dependant.body_params.copy(), - security_requirements=dependant.security_requirements.copy(), - use_cache=dependant.use_cache, - path=dependant.path, - ) - for sub_dependant in dependant.dependencies: - if skip_repeats and sub_dependant.cache_key in visited: - continue - flat_sub = get_flat_dependant( - sub_dependant, skip_repeats=skip_repeats, visited=visited - ) - flat_dependant.path_params.extend(flat_sub.path_params) - 
flat_dependant.query_params.extend(flat_sub.query_params) - flat_dependant.header_params.extend(flat_sub.header_params) - flat_dependant.cookie_params.extend(flat_sub.cookie_params) - flat_dependant.body_params.extend(flat_sub.body_params) - flat_dependant.security_requirements.extend(flat_sub.security_requirements) - return flat_dependant - - -def _get_flat_fields_from_params(fields: List[ModelField]) -> List[ModelField]: - if not fields: - return fields - first_field = fields[0] - if len(fields) == 1 and lenient_issubclass(first_field.type_, BaseModel): - fields_to_extract = get_cached_model_fields(first_field.type_) - return fields_to_extract - return fields - - -def get_flat_params(dependant: Dependant) -> List[ModelField]: - flat_dependant = get_flat_dependant(dependant, skip_repeats=True) - path_params = _get_flat_fields_from_params(flat_dependant.path_params) - query_params = _get_flat_fields_from_params(flat_dependant.query_params) - header_params = _get_flat_fields_from_params(flat_dependant.header_params) - cookie_params = _get_flat_fields_from_params(flat_dependant.cookie_params) - return path_params + query_params + header_params + cookie_params - - -def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: - signature = inspect.signature(call) - globalns = getattr(call, "__globals__", {}) - typed_params = [ - inspect.Parameter( - name=param.name, - kind=param.kind, - default=param.default, - annotation=get_typed_annotation(param.annotation, globalns), - ) - for param in signature.parameters.values() - ] - typed_signature = inspect.Signature(typed_params) - return typed_signature - - -def get_typed_annotation(annotation: Any, globalns: Dict[str, Any]) -> Any: - if isinstance(annotation, str): - annotation = ForwardRef(annotation) - annotation = evaluate_forwardref(annotation, globalns, globalns) - return annotation - - -def get_typed_return_annotation(call: Callable[..., Any]) -> Any: - signature = inspect.signature(call) - annotation = signature.return_annotation - - if annotation is inspect.Signature.empty: - return None - - globalns = getattr(call, "__globals__", {}) - return get_typed_annotation(annotation, globalns) - - -def get_dependant( - *, - path: str, - call: Callable[..., Any], - name: Optional[str] = None, - security_scopes: Optional[List[str]] = None, - use_cache: bool = True, -) -> Dependant: - path_param_names = get_path_param_names(path) - endpoint_signature = get_typed_signature(call) - signature_params = endpoint_signature.parameters - dependant = Dependant( - call=call, - name=name, - path=path, - security_scopes=security_scopes, - use_cache=use_cache, - ) - for param_name, param in signature_params.items(): - is_path_param = param_name in path_param_names - param_details = analyze_param( - param_name=param_name, - annotation=param.annotation, - value=param.default, - is_path_param=is_path_param, - ) - if param_details.depends is not None: - sub_dependant = get_param_sub_dependant( - param_name=param_name, - depends=param_details.depends, - path=path, - security_scopes=security_scopes, - ) - dependant.dependencies.append(sub_dependant) - continue - if add_non_field_param_to_dependency( - param_name=param_name, - type_annotation=param_details.type_annotation, - dependant=dependant, - ): - assert param_details.field is None, ( - f"Cannot specify multiple FastAPI annotations for {param_name!r}" - ) - continue - assert param_details.field is not None - if isinstance(param_details.field.field_info, params.Body): - 
dependant.body_params.append(param_details.field) - else: - add_param_to_fields(field=param_details.field, dependant=dependant) - return dependant - - -def add_non_field_param_to_dependency( - *, param_name: str, type_annotation: Any, dependant: Dependant -) -> Optional[bool]: - if lenient_issubclass(type_annotation, Request): - dependant.request_param_name = param_name - return True - elif lenient_issubclass(type_annotation, WebSocket): - dependant.websocket_param_name = param_name - return True - elif lenient_issubclass(type_annotation, HTTPConnection): - dependant.http_connection_param_name = param_name - return True - elif lenient_issubclass(type_annotation, Response): - dependant.response_param_name = param_name - return True - elif lenient_issubclass(type_annotation, StarletteBackgroundTasks): - dependant.background_tasks_param_name = param_name - return True - elif lenient_issubclass(type_annotation, SecurityScopes): - dependant.security_scopes_param_name = param_name - return True - return None - - -@dataclass -class ParamDetails: - type_annotation: Any - depends: Optional[params.Depends] - field: Optional[ModelField] - - -def analyze_param( - *, - param_name: str, - annotation: Any, - value: Any, - is_path_param: bool, -) -> ParamDetails: - field_info = None - depends = None - type_annotation: Any = Any - use_annotation: Any = Any - if annotation is not inspect.Signature.empty: - use_annotation = annotation - type_annotation = annotation - # Extract Annotated info - if get_origin(use_annotation) is Annotated: - annotated_args = get_args(annotation) - type_annotation = annotated_args[0] - fastapi_annotations = [ - arg - for arg in annotated_args[1:] - if isinstance(arg, (FieldInfo, params.Depends)) - ] - fastapi_specific_annotations = [ - arg - for arg in fastapi_annotations - if isinstance(arg, (params.Param, params.Body, params.Depends)) - ] - if fastapi_specific_annotations: - fastapi_annotation: Union[FieldInfo, params.Depends, None] = ( - fastapi_specific_annotations[-1] - ) - else: - fastapi_annotation = None - # Set default for Annotated FieldInfo - if isinstance(fastapi_annotation, FieldInfo): - # Copy `field_info` because we mutate `field_info.default` below. - field_info = copy_field_info( - field_info=fastapi_annotation, annotation=use_annotation - ) - assert ( - field_info.default is Undefined or field_info.default is RequiredParam - ), ( - f"`{field_info.__class__.__name__}` default value cannot be set in" - f" `Annotated` for {param_name!r}. Set the default value with `=` instead." 
- ) - if value is not inspect.Signature.empty: - assert not is_path_param, "Path parameters cannot have default values" - field_info.default = value - else: - field_info.default = RequiredParam - # Get Annotated Depends - elif isinstance(fastapi_annotation, params.Depends): - depends = fastapi_annotation - # Get Depends from default value - if isinstance(value, params.Depends): - assert depends is None, ( - "Cannot specify `Depends` in `Annotated` and default value" - f" together for {param_name!r}" - ) - assert field_info is None, ( - "Cannot specify a FastAPI annotation in `Annotated` and `Depends` as a" - f" default value together for {param_name!r}" - ) - depends = value - # Get FieldInfo from default value - elif isinstance(value, FieldInfo): - assert field_info is None, ( - "Cannot specify FastAPI annotations in `Annotated` and default value" - f" together for {param_name!r}" - ) - field_info = value - if PYDANTIC_V2: - field_info.annotation = type_annotation - - # Get Depends from type annotation - if depends is not None and depends.dependency is None: - # Copy `depends` before mutating it - depends = copy(depends) - depends.dependency = type_annotation - - # Handle non-param type annotations like Request - if lenient_issubclass( - type_annotation, - ( - Request, - WebSocket, - HTTPConnection, - Response, - StarletteBackgroundTasks, - SecurityScopes, - ), - ): - assert depends is None, f"Cannot specify `Depends` for type {type_annotation!r}" - assert field_info is None, ( - f"Cannot specify FastAPI annotation for type {type_annotation!r}" - ) - # Handle default assignations, neither field_info nor depends was not found in Annotated nor default value - elif field_info is None and depends is None: - default_value = value if value is not inspect.Signature.empty else RequiredParam - if is_path_param: - # We might check here that `default_value is RequiredParam`, but the fact is that the same - # parameter might sometimes be a path parameter and sometimes not. See - # `tests/test_infer_param_optionality.py` for an example. 
- field_info = params.Path(annotation=use_annotation) - elif is_uploadfile_or_nonable_uploadfile_annotation( - type_annotation - ) or is_uploadfile_sequence_annotation(type_annotation): - field_info = params.File(annotation=use_annotation, default=default_value) - elif not field_annotation_is_scalar(annotation=type_annotation): - field_info = params.Body(annotation=use_annotation, default=default_value) - else: - field_info = params.Query(annotation=use_annotation, default=default_value) - - field = None - # It's a field_info, not a dependency - if field_info is not None: - # Handle field_info.in_ - if is_path_param: - assert isinstance(field_info, params.Path), ( - f"Cannot use `{field_info.__class__.__name__}` for path param" - f" {param_name!r}" - ) - elif ( - isinstance(field_info, params.Param) - and getattr(field_info, "in_", None) is None - ): - field_info.in_ = params.ParamTypes.query - use_annotation_from_field_info = get_annotation_from_field_info( - use_annotation, - field_info, - param_name, - ) - if isinstance(field_info, params.Form): - ensure_multipart_is_installed() - if not field_info.alias and getattr(field_info, "convert_underscores", None): - alias = param_name.replace("_", "-") - else: - alias = field_info.alias or param_name - field_info.alias = alias - field = create_model_field( - name=param_name, - type_=use_annotation_from_field_info, - default=field_info.default, - alias=alias, - required=field_info.default in (RequiredParam, Undefined), - field_info=field_info, - ) - if is_path_param: - assert is_scalar_field(field=field), ( - "Path params must be of one of the supported types" - ) - elif isinstance(field_info, params.Query): - assert ( - is_scalar_field(field) - or is_scalar_sequence_field(field) - or ( - lenient_issubclass(field.type_, BaseModel) - # For Pydantic v1 - and getattr(field, "shape", 1) == 1 - ) - ) - - return ParamDetails(type_annotation=type_annotation, depends=depends, field=field) - - -def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: - field_info = field.field_info - field_info_in = getattr(field_info, "in_", None) - if field_info_in == params.ParamTypes.path: - dependant.path_params.append(field) - elif field_info_in == params.ParamTypes.query: - dependant.query_params.append(field) - elif field_info_in == params.ParamTypes.header: - dependant.header_params.append(field) - else: - assert field_info_in == params.ParamTypes.cookie, ( - f"non-body parameters must be in path, query, header or cookie: {field.name}" - ) - dependant.cookie_params.append(field) - - -def is_coroutine_callable(call: Callable[..., Any]) -> bool: - if inspect.isroutine(call): - return inspect.iscoroutinefunction(call) - if inspect.isclass(call): - return False - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.iscoroutinefunction(dunder_call) - - -def is_async_gen_callable(call: Callable[..., Any]) -> bool: - if inspect.isasyncgenfunction(call): - return True - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.isasyncgenfunction(dunder_call) - - -def is_gen_callable(call: Callable[..., Any]) -> bool: - if inspect.isgeneratorfunction(call): - return True - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.isgeneratorfunction(dunder_call) - - -async def solve_generator( - *, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any] -) -> Any: - if is_gen_callable(call): - cm = contextmanager_in_threadpool(contextmanager(call)(**sub_values)) - elif 
is_async_gen_callable(call): - cm = asynccontextmanager(call)(**sub_values) - return await stack.enter_async_context(cm) - - -@dataclass -class SolvedDependency: - values: Dict[str, Any] - errors: List[Any] - background_tasks: Optional[StarletteBackgroundTasks] - response: Response - dependency_cache: Dict[Tuple[Callable[..., Any], Tuple[str]], Any] - - -async def solve_dependencies( - *, - request: Union[Request, WebSocket], - dependant: Dependant, - body: Optional[Union[Dict[str, Any], FormData]] = None, - background_tasks: Optional[StarletteBackgroundTasks] = None, - response: Optional[Response] = None, - dependency_overrides_provider: Optional[Any] = None, - dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None, - async_exit_stack: AsyncExitStack, - embed_body_fields: bool, -) -> SolvedDependency: - values: Dict[str, Any] = {} - errors: List[Any] = [] - if response is None: - response = Response() - del response.headers["content-length"] - response.status_code = None # type: ignore - dependency_cache = dependency_cache or {} - sub_dependant: Dependant - for sub_dependant in dependant.dependencies: - sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) - sub_dependant.cache_key = cast( - Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key - ) - call = sub_dependant.call - use_sub_dependant = sub_dependant - if ( - dependency_overrides_provider - and dependency_overrides_provider.dependency_overrides - ): - original_call = sub_dependant.call - call = getattr( - dependency_overrides_provider, "dependency_overrides", {} - ).get(original_call, original_call) - use_path: str = sub_dependant.path # type: ignore - use_sub_dependant = get_dependant( - path=use_path, - call=call, - name=sub_dependant.name, - security_scopes=sub_dependant.security_scopes, - ) - - solved_result = await solve_dependencies( - request=request, - dependant=use_sub_dependant, - body=body, - background_tasks=background_tasks, - response=response, - dependency_overrides_provider=dependency_overrides_provider, - dependency_cache=dependency_cache, - async_exit_stack=async_exit_stack, - embed_body_fields=embed_body_fields, - ) - background_tasks = solved_result.background_tasks - dependency_cache.update(solved_result.dependency_cache) - if solved_result.errors: - errors.extend(solved_result.errors) - continue - if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: - solved = dependency_cache[sub_dependant.cache_key] - elif is_gen_callable(call) or is_async_gen_callable(call): - solved = await solve_generator( - call=call, stack=async_exit_stack, sub_values=solved_result.values - ) - elif is_coroutine_callable(call): - solved = await call(**solved_result.values) - else: - solved = await run_in_threadpool(call, **solved_result.values) - if sub_dependant.name is not None: - values[sub_dependant.name] = solved - if sub_dependant.cache_key not in dependency_cache: - dependency_cache[sub_dependant.cache_key] = solved - path_values, path_errors = request_params_to_args( - dependant.path_params, request.path_params - ) - query_values, query_errors = request_params_to_args( - dependant.query_params, request.query_params - ) - header_values, header_errors = request_params_to_args( - dependant.header_params, request.headers - ) - cookie_values, cookie_errors = request_params_to_args( - dependant.cookie_params, request.cookies - ) - values.update(path_values) - values.update(query_values) - values.update(header_values) - values.update(cookie_values) - errors += 
path_errors + query_errors + header_errors + cookie_errors - if dependant.body_params: - ( - body_values, - body_errors, - ) = await request_body_to_args( # body_params checked above - body_fields=dependant.body_params, - received_body=body, - embed_body_fields=embed_body_fields, - ) - values.update(body_values) - errors.extend(body_errors) - if dependant.http_connection_param_name: - values[dependant.http_connection_param_name] = request - if dependant.request_param_name and isinstance(request, Request): - values[dependant.request_param_name] = request - elif dependant.websocket_param_name and isinstance(request, WebSocket): - values[dependant.websocket_param_name] = request - if dependant.background_tasks_param_name: - if background_tasks is None: - background_tasks = BackgroundTasks() - values[dependant.background_tasks_param_name] = background_tasks - if dependant.response_param_name: - values[dependant.response_param_name] = response - if dependant.security_scopes_param_name: - values[dependant.security_scopes_param_name] = SecurityScopes( - scopes=dependant.security_scopes - ) - return SolvedDependency( - values=values, - errors=errors, - background_tasks=background_tasks, - response=response, - dependency_cache=dependency_cache, - ) - - -def _validate_value_with_model_field( - *, field: ModelField, value: Any, values: Dict[str, Any], loc: Tuple[str, ...] -) -> Tuple[Any, List[Any]]: - if value is None: - if field.required: - return None, [get_missing_field_error(loc=loc)] - else: - return deepcopy(field.default), [] - v_, errors_ = field.validate(value, values, loc=loc) - if isinstance(errors_, ErrorWrapper): - return None, [errors_] - elif isinstance(errors_, list): - new_errors = _regenerate_error_with_loc(errors=errors_, loc_prefix=()) - return None, new_errors - else: - return v_, [] - - -def _get_multidict_value( - field: ModelField, values: Mapping[str, Any], alias: Union[str, None] = None -) -> Any: - alias = alias or field.alias - if is_sequence_field(field) and isinstance(values, (ImmutableMultiDict, Headers)): - value = values.getlist(alias) - else: - value = values.get(alias, None) - if ( - value is None - or ( - isinstance(field.field_info, params.Form) - and isinstance(value, str) # For type checks - and value == "" - ) - or (is_sequence_field(field) and len(value) == 0) - ): - if field.required: - return - else: - return deepcopy(field.default) - return value - - -def request_params_to_args( - fields: Sequence[ModelField], - received_params: Union[Mapping[str, Any], QueryParams, Headers], -) -> Tuple[Dict[str, Any], List[Any]]: - values: Dict[str, Any] = {} - errors: List[Dict[str, Any]] = [] - - if not fields: - return values, errors - - first_field = fields[0] - fields_to_extract = fields - single_not_embedded_field = False - default_convert_underscores = True - if len(fields) == 1 and lenient_issubclass(first_field.type_, BaseModel): - fields_to_extract = get_cached_model_fields(first_field.type_) - single_not_embedded_field = True - # If headers are in a Pydantic model, the way to disable convert_underscores - # would be with Header(convert_underscores=False) at the Pydantic model level - default_convert_underscores = getattr( - first_field.field_info, "convert_underscores", True - ) - - params_to_process: Dict[str, Any] = {} - - processed_keys = set() - - for field in fields_to_extract: - alias = None - if isinstance(received_params, Headers): - # Handle fields extracted from a Pydantic Model for a header, each field - # doesn't have a FieldInfo of type Header 
with the default convert_underscores=True - convert_underscores = getattr( - field.field_info, "convert_underscores", default_convert_underscores - ) - if convert_underscores: - alias = ( - field.alias - if field.alias != field.name - else field.name.replace("_", "-") - ) - value = _get_multidict_value(field, received_params, alias=alias) - if value is not None: - params_to_process[field.name] = value - processed_keys.add(alias or field.alias) - processed_keys.add(field.name) - - for key, value in received_params.items(): - if key not in processed_keys: - params_to_process[key] = value - - if single_not_embedded_field: - field_info = first_field.field_info - assert isinstance(field_info, params.Param), ( - "Params must be subclasses of Param" - ) - loc: Tuple[str, ...] = (field_info.in_.value,) - v_, errors_ = _validate_value_with_model_field( - field=first_field, value=params_to_process, values=values, loc=loc - ) - return {first_field.name: v_}, errors_ - - for field in fields: - value = _get_multidict_value(field, received_params) - field_info = field.field_info - assert isinstance(field_info, params.Param), ( - "Params must be subclasses of Param" - ) - loc = (field_info.in_.value, field.alias) - v_, errors_ = _validate_value_with_model_field( - field=field, value=value, values=values, loc=loc - ) - if errors_: - errors.extend(errors_) - else: - values[field.name] = v_ - return values, errors - - -def is_union_of_base_models(field_type: Any) -> bool: - """Check if field type is a Union where all members are BaseModel subclasses.""" - from fastapi.types import UnionType - - origin = get_origin(field_type) - - # Check if it's a Union type (covers both typing.Union and types.UnionType in Python 3.10+) - if origin is not Union and origin is not UnionType: - return False - - union_args = get_args(field_type) - - for arg in union_args: - if not lenient_issubclass(arg, BaseModel): - return False - - return True - - -def _should_embed_body_fields(fields: List[ModelField]) -> bool: - if not fields: - return False - # More than one dependency could have the same field, it would show up as multiple - # fields but it's the same one, so count them by name - body_param_names_set = {field.name for field in fields} - # A top level field has to be a single field, not multiple - if len(body_param_names_set) > 1: - return True - first_field = fields[0] - # If it explicitly specifies it is embedded, it has to be embedded - if getattr(first_field.field_info, "embed", None): - return True - # If it's a Form (or File) field, it has to be a BaseModel (or a union of BaseModels) to be top level - # otherwise it has to be embedded, so that the key value pair can be extracted - if ( - isinstance(first_field.field_info, params.Form) - and not lenient_issubclass(first_field.type_, BaseModel) - and not is_union_of_base_models(first_field.type_) - ): - return True - return False - - -async def _extract_form_body( - body_fields: List[ModelField], - received_body: FormData, -) -> Dict[str, Any]: - values = {} - first_field = body_fields[0] - first_field_info = first_field.field_info - - for field in body_fields: - value = _get_multidict_value(field, received_body) - if ( - isinstance(first_field_info, params.File) - and is_bytes_field(field) - and isinstance(value, UploadFile) - ): - value = await value.read() - elif ( - is_bytes_sequence_field(field) - and isinstance(first_field_info, params.File) - and value_is_sequence(value) - ): - # For types - assert isinstance(value, sequence_types) # type: ignore[arg-type] - 
results: List[Union[bytes, str]] = [] - - async def process_fn( - fn: Callable[[], Coroutine[Any, Any, Any]], - ) -> None: - result = await fn() - results.append(result) # noqa: B023 - - async with anyio.create_task_group() as tg: - for sub_value in value: - tg.start_soon(process_fn, sub_value.read) - value = serialize_sequence_value(field=field, value=results) - if value is not None: - values[field.alias] = value - for key, value in received_body.items(): - if key not in values: - values[key] = value - return values - - -async def request_body_to_args( - body_fields: List[ModelField], - received_body: Optional[Union[Dict[str, Any], FormData]], - embed_body_fields: bool, -) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: - values: Dict[str, Any] = {} - errors: List[Dict[str, Any]] = [] - assert body_fields, "request_body_to_args() should be called with fields" - single_not_embedded_field = len(body_fields) == 1 and not embed_body_fields - first_field = body_fields[0] - body_to_process = received_body - - fields_to_extract: List[ModelField] = body_fields - - if single_not_embedded_field and lenient_issubclass(first_field.type_, BaseModel): - fields_to_extract = get_cached_model_fields(first_field.type_) - - if isinstance(received_body, FormData): - body_to_process = await _extract_form_body(fields_to_extract, received_body) - - if single_not_embedded_field: - loc: Tuple[str, ...] = ("body",) - v_, errors_ = _validate_value_with_model_field( - field=first_field, value=body_to_process, values=values, loc=loc - ) - return {first_field.name: v_}, errors_ - for field in body_fields: - loc = ("body", field.alias) - value: Optional[Any] = None - if body_to_process is not None: - try: - value = body_to_process.get(field.alias) - # If the received body is a list, not a dict - except AttributeError: - errors.append(get_missing_field_error(loc)) - continue - v_, errors_ = _validate_value_with_model_field( - field=field, value=value, values=values, loc=loc - ) - if errors_: - errors.extend(errors_) - else: - values[field.name] = v_ - return values, errors - - -def get_body_field( - *, flat_dependant: Dependant, name: str, embed_body_fields: bool -) -> Optional[ModelField]: - """ - Get a ModelField representing the request body for a path operation, combining - all body parameters into a single field if necessary. - - Used to check if it's form data (with `isinstance(body_field, params.Form)`) - or JSON and to generate the JSON Schema for a request body. - - This is **not** used to validate/parse the request body, that's done with each - individual body parameter. 
- """ - if not flat_dependant.body_params: - return None - first_param = flat_dependant.body_params[0] - if not embed_body_fields: - return first_param - model_name = "Body_" + name - BodyModel = create_body_model( - fields=flat_dependant.body_params, model_name=model_name - ) - required = any(True for f in flat_dependant.body_params if f.required) - BodyFieldInfo_kwargs: Dict[str, Any] = { - "annotation": BodyModel, - "alias": "body", - } - if not required: - BodyFieldInfo_kwargs["default"] = None - if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params): - BodyFieldInfo: Type[params.Body] = params.File - elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params): - BodyFieldInfo = params.Form - else: - BodyFieldInfo = params.Body - - body_param_media_types = [ - f.field_info.media_type - for f in flat_dependant.body_params - if isinstance(f.field_info, params.Body) - ] - if len(set(body_param_media_types)) == 1: - BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0] - final_field = create_model_field( - name="body", - type_=BodyModel, - required=required, - alias="body", - field_info=BodyFieldInfo(**BodyFieldInfo_kwargs), - ) - return final_field diff --git a/venv/lib/python3.12/site-packages/fastapi/encoders.py b/venv/lib/python3.12/site-packages/fastapi/encoders.py deleted file mode 100644 index 451ea076..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/encoders.py +++ /dev/null @@ -1,343 +0,0 @@ -import dataclasses -import datetime -from collections import defaultdict, deque -from decimal import Decimal -from enum import Enum -from ipaddress import ( - IPv4Address, - IPv4Interface, - IPv4Network, - IPv6Address, - IPv6Interface, - IPv6Network, -) -from pathlib import Path, PurePath -from re import Pattern -from types import GeneratorType -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union -from uuid import UUID - -from fastapi.types import IncEx -from pydantic import BaseModel -from pydantic.color import Color -from pydantic.networks import AnyUrl, NameEmail -from pydantic.types import SecretBytes, SecretStr -from typing_extensions import Annotated, Doc - -from ._compat import PYDANTIC_V2, UndefinedType, Url, _model_dump - - -# Taken from Pydantic v1 as is -def isoformat(o: Union[datetime.date, datetime.time]) -> str: - return o.isoformat() - - -# Taken from Pydantic v1 as is -# TODO: pv2 should this return strings instead? -def decimal_encoder(dec_value: Decimal) -> Union[int, float]: - """ - Encodes a Decimal as int of there's no exponent, otherwise float - - This is useful when we use ConstrainedDecimal to represent Numeric(x,0) - where a integer (but not int typed) is used. Encoding this as a float - results in failed round-tripping between encode and parse. - Our Id type is a prime example of this. 
- - >>> decimal_encoder(Decimal("1.0")) - 1.0 - - >>> decimal_encoder(Decimal("1")) - 1 - """ - if dec_value.as_tuple().exponent >= 0: # type: ignore[operator] - return int(dec_value) - else: - return float(dec_value) - - -ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { - bytes: lambda o: o.decode(), - Color: str, - datetime.date: isoformat, - datetime.datetime: isoformat, - datetime.time: isoformat, - datetime.timedelta: lambda td: td.total_seconds(), - Decimal: decimal_encoder, - Enum: lambda o: o.value, - frozenset: list, - deque: list, - GeneratorType: list, - IPv4Address: str, - IPv4Interface: str, - IPv4Network: str, - IPv6Address: str, - IPv6Interface: str, - IPv6Network: str, - NameEmail: str, - Path: str, - Pattern: lambda o: o.pattern, - SecretBytes: str, - SecretStr: str, - set: list, - UUID: str, - Url: str, - AnyUrl: str, -} - - -def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]], -) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict( - tuple - ) - for type_, encoder in type_encoder_map.items(): - encoders_by_class_tuples[encoder] += (type_,) - return encoders_by_class_tuples - - -encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) - - -def jsonable_encoder( - obj: Annotated[ - Any, - Doc( - """ - The input object to convert to JSON. - """ - ), - ], - include: Annotated[ - Optional[IncEx], - Doc( - """ - Pydantic's `include` parameter, passed to Pydantic models to set the - fields to include. - """ - ), - ] = None, - exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Pydantic's `exclude` parameter, passed to Pydantic models to set the - fields to exclude. - """ - ), - ] = None, - by_alias: Annotated[ - bool, - Doc( - """ - Pydantic's `by_alias` parameter, passed to Pydantic models to define if - the output should use the alias names (when provided) or the Python - attribute names. In an API, if you set an alias, it's probably because you - want to use it in the result, so you probably want to leave this set to - `True`. - """ - ), - ] = True, - exclude_unset: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_unset` parameter, passed to Pydantic models to define - if it should exclude from the output the fields that were not explicitly - set (and that only had their default values). - """ - ), - ] = False, - exclude_defaults: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define - if it should exclude from the output the fields that had the same default - value, even when they were explicitly set. - """ - ), - ] = False, - exclude_none: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_none` parameter, passed to Pydantic models to define - if it should exclude from the output any fields that have a `None` value. - """ - ), - ] = False, - custom_encoder: Annotated[ - Optional[Dict[Any, Callable[[Any], Any]]], - Doc( - """ - Pydantic's `custom_encoder` parameter, passed to Pydantic models to define - a custom encoder. - """ - ), - ] = None, - sqlalchemy_safe: Annotated[ - bool, - Doc( - """ - Exclude from the output any fields that start with the name `_sa`. - - This is mainly a hack for compatibility with SQLAlchemy objects, they - store internal SQLAlchemy-specific state in attributes named with `_sa`, - and those objects can't (and shouldn't be) serialized to JSON. 
- """ - ), - ] = True, -) -> Any: - """ - Convert any object to something that can be encoded in JSON. - - This is used internally by FastAPI to make sure anything you return can be - encoded as JSON before it is sent to the client. - - You can also use it yourself, for example to convert objects before saving them - in a database that supports only JSON. - - Read more about it in the - [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/). - """ - custom_encoder = custom_encoder or {} - if custom_encoder: - if type(obj) in custom_encoder: - return custom_encoder[type(obj)](obj) - else: - for encoder_type, encoder_instance in custom_encoder.items(): - if isinstance(obj, encoder_type): - return encoder_instance(obj) - if include is not None and not isinstance(include, (set, dict)): - include = set(include) - if exclude is not None and not isinstance(exclude, (set, dict)): - exclude = set(exclude) - if isinstance(obj, BaseModel): - # TODO: remove when deprecating Pydantic v1 - encoders: Dict[Any, Any] = {} - if not PYDANTIC_V2: - encoders = getattr(obj.__config__, "json_encoders", {}) # type: ignore[attr-defined] - if custom_encoder: - encoders.update(custom_encoder) - obj_dict = _model_dump( - obj, - mode="json", - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - exclude_defaults=exclude_defaults, - ) - if "__root__" in obj_dict: - obj_dict = obj_dict["__root__"] - return jsonable_encoder( - obj_dict, - exclude_none=exclude_none, - exclude_defaults=exclude_defaults, - # TODO: remove when deprecating Pydantic v1 - custom_encoder=encoders, - sqlalchemy_safe=sqlalchemy_safe, - ) - if dataclasses.is_dataclass(obj): - obj_dict = dataclasses.asdict(obj) - return jsonable_encoder( - obj_dict, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - if isinstance(obj, Enum): - return obj.value - if isinstance(obj, PurePath): - return str(obj) - if isinstance(obj, (str, int, float, type(None))): - return obj - if isinstance(obj, UndefinedType): - return None - if isinstance(obj, dict): - encoded_dict = {} - allowed_keys = set(obj.keys()) - if include is not None: - allowed_keys &= set(include) - if exclude is not None: - allowed_keys -= set(exclude) - for key, value in obj.items(): - if ( - ( - not sqlalchemy_safe - or (not isinstance(key, str)) - or (not key.startswith("_sa")) - ) - and (value is not None or not exclude_none) - and key in allowed_keys - ): - encoded_key = jsonable_encoder( - key, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - encoded_value = jsonable_encoder( - value, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - encoded_dict[encoded_key] = encoded_value - return encoded_dict - if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)): - encoded_list = [] - for item in obj: - encoded_list.append( - jsonable_encoder( - item, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - ) - return encoded_list - - if 
type(obj) in ENCODERS_BY_TYPE:
-        return ENCODERS_BY_TYPE[type(obj)](obj)
-    for encoder, classes_tuple in encoders_by_class_tuples.items():
-        if isinstance(obj, classes_tuple):
-            return encoder(obj)
-
-    try:
-        data = dict(obj)
-    except Exception as e:
-        errors: List[Exception] = []
-        errors.append(e)
-        try:
-            data = vars(obj)
-        except Exception as e:
-            errors.append(e)
-            raise ValueError(errors) from e
-    return jsonable_encoder(
-        data,
-        include=include,
-        exclude=exclude,
-        by_alias=by_alias,
-        exclude_unset=exclude_unset,
-        exclude_defaults=exclude_defaults,
-        exclude_none=exclude_none,
-        custom_encoder=custom_encoder,
-        sqlalchemy_safe=sqlalchemy_safe,
-    )
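The `jsonable_encoder` docstring above mentions converting objects before saving them in a JSON-only store but carries no example; a minimal sketch, where the `Item` model and `fake_db` dict are hypothetical:

```python
from datetime import datetime

from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel


class Item(BaseModel):  # hypothetical model
    title: str
    timestamp: datetime


fake_db = {}  # hypothetical JSON-only store
item = Item(title="demo", timestamp=datetime(2024, 1, 1))
# the datetime becomes an ISO 8601 string, so the result is JSON-serializable
fake_db["demo"] = jsonable_encoder(item)
```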
diff --git a/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py b/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py
deleted file mode 100644
index 6c2ba7fe..00000000
--- a/venv/lib/python3.12/site-packages/fastapi/exception_handlers.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from fastapi.encoders import jsonable_encoder
-from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError
-from fastapi.utils import is_body_allowed_for_status_code
-from fastapi.websockets import WebSocket
-from starlette.exceptions import HTTPException
-from starlette.requests import Request
-from starlette.responses import JSONResponse, Response
-from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY, WS_1008_POLICY_VIOLATION
-
-
-async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
-    headers = getattr(exc, "headers", None)
-    if not is_body_allowed_for_status_code(exc.status_code):
-        return Response(status_code=exc.status_code, headers=headers)
-    return JSONResponse(
-        {"detail": exc.detail}, status_code=exc.status_code, headers=headers
-    )
-
-
-async def request_validation_exception_handler(
-    request: Request, exc: RequestValidationError
-) -> JSONResponse:
-    return JSONResponse(
-        status_code=HTTP_422_UNPROCESSABLE_ENTITY,
-        content={"detail": jsonable_encoder(exc.errors())},
-    )
-
-
-async def websocket_request_validation_exception_handler(
-    websocket: WebSocket, exc: WebSocketRequestValidationError
-) -> None:
-    await websocket.close(
-        code=WS_1008_POLICY_VIOLATION, reason=jsonable_encoder(exc.errors())
-    )
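The module above defines the default handlers FastAPI installs; a minimal sketch of reusing one inside a custom override (the logging line is illustrative only):

```python
from fastapi import FastAPI, Request
from fastapi.exception_handlers import request_validation_exception_handler
from fastapi.exceptions import RequestValidationError

app = FastAPI()


@app.exception_handler(RequestValidationError)
async def log_then_handle(request: Request, exc: RequestValidationError):
    # illustrative logging; then delegate to the stock 422 handler shown above
    print(f"validation failed on {request.url.path}: {exc.errors()}")
    return await request_validation_exception_handler(request, exc)
```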
- """ - - -class ValidationException(Exception): - def __init__(self, errors: Sequence[Any]) -> None: - self._errors = errors - - def errors(self) -> Sequence[Any]: - return self._errors - - -class RequestValidationError(ValidationException): - def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: - super().__init__(errors) - self.body = body - - -class WebSocketRequestValidationError(ValidationException): - pass - - -class ResponseValidationError(ValidationException): - def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: - super().__init__(errors) - self.body = body - - def __str__(self) -> str: - message = f"{len(self._errors)} validation errors:\n" - for err in self._errors: - message += f" {err}\n" - return message diff --git a/venv/lib/python3.12/site-packages/fastapi/logger.py b/venv/lib/python3.12/site-packages/fastapi/logger.py deleted file mode 100644 index 5b2c4ad5..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/logger.py +++ /dev/null @@ -1,3 +0,0 @@ -import logging - -logger = logging.getLogger("fastapi") diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py b/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py deleted file mode 100644 index 620296d5..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware import Middleware as Middleware diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 71c2205b..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc deleted file mode 100644 index 1f95516f..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/cors.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc deleted file mode 100644 index 74bafcf9..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/gzip.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc deleted file mode 100644 index 8cca161c..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/httpsredirect.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc deleted file mode 100644 index 19b1037e..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/trustedhost.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc deleted file mode 100644 index 5927be69..00000000 Binary files 
a/venv/lib/python3.12/site-packages/fastapi/middleware/__pycache__/wsgi.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py b/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py deleted file mode 100644 index 8dfaad0d..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/cors.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py b/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py deleted file mode 100644 index bbeb2cc7..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/gzip.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py b/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py deleted file mode 100644 index b7a3d8e0..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/httpsredirect.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.middleware.httpsredirect import ( # noqa - HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, -) diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py b/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py deleted file mode 100644 index 08d7e035..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/trustedhost.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.middleware.trustedhost import ( # noqa - TrustedHostMiddleware as TrustedHostMiddleware, -) diff --git a/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py b/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py deleted file mode 100644 index c4c6a797..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/middleware/wsgi.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py b/venv/lib/python3.12/site-packages/fastapi/openapi/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 1126b938..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc deleted file mode 100644 index e7106d44..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/constants.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc deleted file mode 100644 index a1769e44..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/docs.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc deleted file mode 100644 index 8bda8ed5..00000000 Binary files 
a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index a71cd8d0..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/openapi/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py b/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py deleted file mode 100644 index d724ee3c..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/openapi/constants.py +++ /dev/null @@ -1,3 +0,0 @@ -METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} -REF_PREFIX = "#/components/schemas/" -REF_TEMPLATE = "#/components/schemas/{model}" diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py b/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py deleted file mode 100644 index f181b43c..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/openapi/docs.py +++ /dev/null @@ -1,344 +0,0 @@ -import json -from typing import Any, Dict, Optional - -from fastapi.encoders import jsonable_encoder -from starlette.responses import HTMLResponse -from typing_extensions import Annotated, Doc - -swagger_ui_default_parameters: Annotated[ - Dict[str, Any], - Doc( - """ - Default configurations for Swagger UI. - - You can use it as a template to add any other configurations needed. - """ - ), -] = { - "dom_id": "#swagger-ui", - "layout": "BaseLayout", - "deepLinking": True, - "showExtensions": True, - "showCommonExtensions": True, -} - - -def get_swagger_ui_html( - *, - openapi_url: Annotated[ - str, - Doc( - """ - The OpenAPI URL that Swagger UI should load and use. - - This is normally done automatically by FastAPI using the default URL - `/openapi.json`. - """ - ), - ], - title: Annotated[ - str, - Doc( - """ - The HTML `` content, normally shown in the browser tab. - """ - ), - ], - swagger_js_url: Annotated[ - str, - Doc( - """ - The URL to use to load the Swagger UI JavaScript. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui-bundle.js", - swagger_css_url: Annotated[ - str, - Doc( - """ - The URL to use to load the Swagger UI CSS. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", - swagger_favicon_url: Annotated[ - str, - Doc( - """ - The URL of the favicon to use. It is normally shown in the browser tab. - """ - ), - ] = "https://fastapi.tiangolo.com/img/favicon.png", - oauth2_redirect_url: Annotated[ - Optional[str], - Doc( - """ - The OAuth2 redirect URL, it is normally automatically handled by FastAPI. - """ - ), - ] = None, - init_oauth: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - A dictionary with Swagger UI OAuth2 initialization configurations. - """ - ), - ] = None, - swagger_ui_parameters: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Configuration parameters for Swagger UI. - - It defaults to [swagger_ui_default_parameters][fastapi.openapi.docs.swagger_ui_default_parameters]. - """ - ), - ] = None, -) -> HTMLResponse: - """ - Generate and return the HTML that loads Swagger UI for the interactive - API docs (normally served at `/docs`). 
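`get_swagger_ui_html()` is mostly useful when self-hosting the docs assets instead of loading them from the CDN defaults. A minimal sketch, assuming the Swagger UI files are served under a hypothetical `/static` mount:

```python
from fastapi import FastAPI
from fastapi.openapi.docs import get_swagger_ui_html

app = FastAPI(docs_url=None)  # disable the built-in /docs route


@app.get("/docs", include_in_schema=False)
async def custom_swagger_ui():
    # Point Swagger UI at self-hosted assets; the /static URLs are
    # placeholders for wherever the files are actually mounted.
    return get_swagger_ui_html(
        openapi_url="/openapi.json",
        title="API docs",
        swagger_js_url="/static/swagger-ui-bundle.js",
        swagger_css_url="/static/swagger-ui.css",
    )
```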
- - You would only call this function yourself if you needed to override some parts, - for example the URLs to use to load Swagger UI's JavaScript and CSS. - - Read more about it in the - [FastAPI docs for Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/) - and the [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). - """ - current_swagger_ui_parameters = swagger_ui_default_parameters.copy() - if swagger_ui_parameters: - current_swagger_ui_parameters.update(swagger_ui_parameters) - - html = f""" - <!DOCTYPE html> - <html> - <head> - <link type="text/css" rel="stylesheet" href="{swagger_css_url}"> - <link rel="shortcut icon" href="{swagger_favicon_url}"> - <title>{title} - - -
-
- - - - - - """ - return HTMLResponse(html) - - -def get_redoc_html( - *, - openapi_url: Annotated[ - str, - Doc( - """ - The OpenAPI URL that ReDoc should load and use. - - This is normally done automatically by FastAPI using the default URL - `/openapi.json`. - """ - ), - ], - title: Annotated[ - str, - Doc( - """ - The HTML `` content, normally shown in the browser tab. - """ - ), - ], - redoc_js_url: Annotated[ - str, - Doc( - """ - The URL to use to load the ReDoc JavaScript. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/redoc@2/bundles/redoc.standalone.js", - redoc_favicon_url: Annotated[ - str, - Doc( - """ - The URL of the favicon to use. It is normally shown in the browser tab. - """ - ), - ] = "https://fastapi.tiangolo.com/img/favicon.png", - with_google_fonts: Annotated[ - bool, - Doc( - """ - Load and use Google Fonts. - """ - ), - ] = True, -) -> HTMLResponse: - """ - Generate and return the HTML response that loads ReDoc for the alternative - API docs (normally served at `/redoc`). - - You would only call this function yourself if you needed to override some parts, - for example the URLs to use to load ReDoc's JavaScript and CSS. - - Read more about it in the - [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). - """ - html = f""" - <!DOCTYPE html> - <html> - <head> - <title>{title} - - - - """ - if with_google_fonts: - html += """ - - """ - html += f""" - - - - - - - - - - - """ - return HTMLResponse(html) - - -def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse: - """ - Generate the HTML response with the OAuth2 redirection for Swagger UI. - - You normally don't need to use or change this. - """ - # copied from https://github.com/swagger-api/swagger-ui/blob/v4.14.0/dist/oauth2-redirect.html - html = """ - - - - Swagger UI: OAuth2 Redirect - - - - - - """ - return HTMLResponse(content=html) diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/models.py b/venv/lib/python3.12/site-packages/fastapi/openapi/models.py deleted file mode 100644 index ed07b40f..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/openapi/models.py +++ /dev/null @@ -1,445 +0,0 @@ -from enum import Enum -from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type, Union - -from fastapi._compat import ( - PYDANTIC_V2, - CoreSchema, - GetJsonSchemaHandler, - JsonSchemaValue, - _model_rebuild, - with_info_plain_validator_function, -) -from fastapi.logger import logger -from pydantic import AnyUrl, BaseModel, Field -from typing_extensions import Annotated, Literal, TypedDict -from typing_extensions import deprecated as typing_deprecated - -try: - import email_validator - - assert email_validator # make autoflake ignore the unused import - from pydantic import EmailStr -except ImportError: # pragma: no cover - - class EmailStr(str): # type: ignore - @classmethod - def __get_validators__(cls) -> Iterable[Callable[..., Any]]: - yield cls.validate - - @classmethod - def validate(cls, v: Any) -> str: - logger.warning( - "email-validator not installed, email fields will be treated as str.\n" - "To install, run: pip install email-validator" - ) - return str(v) - - @classmethod - def _validate(cls, __input_value: Any, _: Any) -> str: - logger.warning( - "email-validator not installed, email fields will be treated as str.\n" - "To install, run: pip install email-validator" - ) - return str(__input_value) - - @classmethod - def __get_pydantic_json_schema__( - cls, 
core_schema: CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - return {"type": "string", "format": "email"} - - @classmethod - def __get_pydantic_core_schema__( - cls, source: Type[Any], handler: Callable[[Any], CoreSchema] - ) -> CoreSchema: - return with_info_plain_validator_function(cls._validate) - - -class BaseModelWithConfig(BaseModel): - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Contact(BaseModelWithConfig): - name: Optional[str] = None - url: Optional[AnyUrl] = None - email: Optional[EmailStr] = None - - -class License(BaseModelWithConfig): - name: str - identifier: Optional[str] = None - url: Optional[AnyUrl] = None - - -class Info(BaseModelWithConfig): - title: str - summary: Optional[str] = None - description: Optional[str] = None - termsOfService: Optional[str] = None - contact: Optional[Contact] = None - license: Optional[License] = None - version: str - - -class ServerVariable(BaseModelWithConfig): - enum: Annotated[Optional[List[str]], Field(min_length=1)] = None - default: str - description: Optional[str] = None - - -class Server(BaseModelWithConfig): - url: Union[AnyUrl, str] - description: Optional[str] = None - variables: Optional[Dict[str, ServerVariable]] = None - - -class Reference(BaseModel): - ref: str = Field(alias="$ref") - - -class Discriminator(BaseModel): - propertyName: str - mapping: Optional[Dict[str, str]] = None - - -class XML(BaseModelWithConfig): - name: Optional[str] = None - namespace: Optional[str] = None - prefix: Optional[str] = None - attribute: Optional[bool] = None - wrapped: Optional[bool] = None - - -class ExternalDocumentation(BaseModelWithConfig): - description: Optional[str] = None - url: AnyUrl - - -class Schema(BaseModelWithConfig): - # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-json-schema-core-vocabu - # Core Vocabulary - schema_: Optional[str] = Field(default=None, alias="$schema") - vocabulary: Optional[str] = Field(default=None, alias="$vocabulary") - id: Optional[str] = Field(default=None, alias="$id") - anchor: Optional[str] = Field(default=None, alias="$anchor") - dynamicAnchor: Optional[str] = Field(default=None, alias="$dynamicAnchor") - ref: Optional[str] = Field(default=None, alias="$ref") - dynamicRef: Optional[str] = Field(default=None, alias="$dynamicRef") - defs: Optional[Dict[str, "SchemaOrBool"]] = Field(default=None, alias="$defs") - comment: Optional[str] = Field(default=None, alias="$comment") - # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-a-vocabulary-for-applying-s - # A Vocabulary for Applying Subschemas - allOf: Optional[List["SchemaOrBool"]] = None - anyOf: Optional[List["SchemaOrBool"]] = None - oneOf: Optional[List["SchemaOrBool"]] = None - not_: Optional["SchemaOrBool"] = Field(default=None, alias="not") - if_: Optional["SchemaOrBool"] = Field(default=None, alias="if") - then: Optional["SchemaOrBool"] = None - else_: Optional["SchemaOrBool"] = Field(default=None, alias="else") - dependentSchemas: Optional[Dict[str, "SchemaOrBool"]] = None - prefixItems: Optional[List["SchemaOrBool"]] = None - # TODO: uncomment and remove below when deprecating Pydantic v1 - # It generales a list of schemas for tuples, before prefixItems was available - # items: Optional["SchemaOrBool"] = None - items: Optional[Union["SchemaOrBool", List["SchemaOrBool"]]] = None - contains: Optional["SchemaOrBool"] = None - properties: Optional[Dict[str, 
"SchemaOrBool"]] = None - patternProperties: Optional[Dict[str, "SchemaOrBool"]] = None - additionalProperties: Optional["SchemaOrBool"] = None - propertyNames: Optional["SchemaOrBool"] = None - unevaluatedItems: Optional["SchemaOrBool"] = None - unevaluatedProperties: Optional["SchemaOrBool"] = None - # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-structural - # A Vocabulary for Structural Validation - type: Optional[str] = None - enum: Optional[List[Any]] = None - const: Optional[Any] = None - multipleOf: Optional[float] = Field(default=None, gt=0) - maximum: Optional[float] = None - exclusiveMaximum: Optional[float] = None - minimum: Optional[float] = None - exclusiveMinimum: Optional[float] = None - maxLength: Optional[int] = Field(default=None, ge=0) - minLength: Optional[int] = Field(default=None, ge=0) - pattern: Optional[str] = None - maxItems: Optional[int] = Field(default=None, ge=0) - minItems: Optional[int] = Field(default=None, ge=0) - uniqueItems: Optional[bool] = None - maxContains: Optional[int] = Field(default=None, ge=0) - minContains: Optional[int] = Field(default=None, ge=0) - maxProperties: Optional[int] = Field(default=None, ge=0) - minProperties: Optional[int] = Field(default=None, ge=0) - required: Optional[List[str]] = None - dependentRequired: Optional[Dict[str, Set[str]]] = None - # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-vocabularies-for-semantic-c - # Vocabularies for Semantic Content With "format" - format: Optional[str] = None - # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-the-conten - # A Vocabulary for the Contents of String-Encoded Data - contentEncoding: Optional[str] = None - contentMediaType: Optional[str] = None - contentSchema: Optional["SchemaOrBool"] = None - # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-basic-meta - # A Vocabulary for Basic Meta-Data Annotations - title: Optional[str] = None - description: Optional[str] = None - default: Optional[Any] = None - deprecated: Optional[bool] = None - readOnly: Optional[bool] = None - writeOnly: Optional[bool] = None - examples: Optional[List[Any]] = None - # Ref: OpenAPI 3.1.0: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#schema-object - # Schema Object - discriminator: Optional[Discriminator] = None - xml: Optional[XML] = None - externalDocs: Optional[ExternalDocumentation] = None - example: Annotated[ - Optional[Any], - typing_deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = None - - -# Ref: https://json-schema.org/draft/2020-12/json-schema-core.html#name-json-schema-documents -# A JSON Schema MUST be an object or a boolean. 
-SchemaOrBool = Union[Schema, bool] - - -class Example(TypedDict, total=False): - summary: Optional[str] - description: Optional[str] - value: Optional[Any] - externalValue: Optional[AnyUrl] - - if PYDANTIC_V2: # type: ignore [misc] - __pydantic_config__ = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class ParameterInType(Enum): - query = "query" - header = "header" - path = "path" - cookie = "cookie" - - -class Encoding(BaseModelWithConfig): - contentType: Optional[str] = None - headers: Optional[Dict[str, Union["Header", Reference]]] = None - style: Optional[str] = None - explode: Optional[bool] = None - allowReserved: Optional[bool] = None - - -class MediaType(BaseModelWithConfig): - schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - encoding: Optional[Dict[str, Encoding]] = None - - -class ParameterBase(BaseModelWithConfig): - description: Optional[str] = None - required: Optional[bool] = None - deprecated: Optional[bool] = None - # Serialization rules for simple scenarios - style: Optional[str] = None - explode: Optional[bool] = None - allowReserved: Optional[bool] = None - schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - # Serialization rules for more complex scenarios - content: Optional[Dict[str, MediaType]] = None - - -class Parameter(ParameterBase): - name: str - in_: ParameterInType = Field(alias="in") - - -class Header(ParameterBase): - pass - - -class RequestBody(BaseModelWithConfig): - description: Optional[str] = None - content: Dict[str, MediaType] - required: Optional[bool] = None - - -class Link(BaseModelWithConfig): - operationRef: Optional[str] = None - operationId: Optional[str] = None - parameters: Optional[Dict[str, Union[Any, str]]] = None - requestBody: Optional[Union[Any, str]] = None - description: Optional[str] = None - server: Optional[Server] = None - - -class Response(BaseModelWithConfig): - description: str - headers: Optional[Dict[str, Union[Header, Reference]]] = None - content: Optional[Dict[str, MediaType]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None - - -class Operation(BaseModelWithConfig): - tags: Optional[List[str]] = None - summary: Optional[str] = None - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None - operationId: Optional[str] = None - parameters: Optional[List[Union[Parameter, Reference]]] = None - requestBody: Optional[Union[RequestBody, Reference]] = None - # Using Any for Specification Extensions - responses: Optional[Dict[str, Union[Response, Any]]] = None - callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None - deprecated: Optional[bool] = None - security: Optional[List[Dict[str, List[str]]]] = None - servers: Optional[List[Server]] = None - - -class PathItem(BaseModelWithConfig): - ref: Optional[str] = Field(default=None, alias="$ref") - summary: Optional[str] = None - description: Optional[str] = None - get: Optional[Operation] = None - put: Optional[Operation] = None - post: Optional[Operation] = None - delete: Optional[Operation] = None - options: Optional[Operation] = None - head: Optional[Operation] = None - patch: Optional[Operation] = None - trace: Optional[Operation] = None - servers: Optional[List[Server]] = None - parameters: Optional[List[Union[Parameter, 
Reference]]] = None - - -class SecuritySchemeType(Enum): - apiKey = "apiKey" - http = "http" - oauth2 = "oauth2" - openIdConnect = "openIdConnect" - - -class SecurityBase(BaseModelWithConfig): - type_: SecuritySchemeType = Field(alias="type") - description: Optional[str] = None - - -class APIKeyIn(Enum): - query = "query" - header = "header" - cookie = "cookie" - - -class APIKey(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.apiKey, alias="type") - in_: APIKeyIn = Field(alias="in") - name: str - - -class HTTPBase(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.http, alias="type") - scheme: str - - -class HTTPBearer(HTTPBase): - scheme: Literal["bearer"] = "bearer" - bearerFormat: Optional[str] = None - - -class OAuthFlow(BaseModelWithConfig): - refreshUrl: Optional[str] = None - scopes: Dict[str, str] = {} - - -class OAuthFlowImplicit(OAuthFlow): - authorizationUrl: str - - -class OAuthFlowPassword(OAuthFlow): - tokenUrl: str - - -class OAuthFlowClientCredentials(OAuthFlow): - tokenUrl: str - - -class OAuthFlowAuthorizationCode(OAuthFlow): - authorizationUrl: str - tokenUrl: str - - -class OAuthFlows(BaseModelWithConfig): - implicit: Optional[OAuthFlowImplicit] = None - password: Optional[OAuthFlowPassword] = None - clientCredentials: Optional[OAuthFlowClientCredentials] = None - authorizationCode: Optional[OAuthFlowAuthorizationCode] = None - - -class OAuth2(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.oauth2, alias="type") - flows: OAuthFlows - - -class OpenIdConnect(SecurityBase): - type_: SecuritySchemeType = Field( - default=SecuritySchemeType.openIdConnect, alias="type" - ) - openIdConnectUrl: str - - -SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] - - -class Components(BaseModelWithConfig): - schemas: Optional[Dict[str, Union[Schema, Reference]]] = None - responses: Optional[Dict[str, Union[Response, Reference]]] = None - parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None - headers: Optional[Dict[str, Union[Header, Reference]]] = None - securitySchemes: Optional[Dict[str, Union[SecurityScheme, Reference]]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None - # Using Any for Specification Extensions - callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] = None - pathItems: Optional[Dict[str, Union[PathItem, Reference]]] = None - - -class Tag(BaseModelWithConfig): - name: str - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None - - -class OpenAPI(BaseModelWithConfig): - openapi: str - info: Info - jsonSchemaDialect: Optional[str] = None - servers: Optional[List[Server]] = None - # Using Any for Specification Extensions - paths: Optional[Dict[str, Union[PathItem, Any]]] = None - webhooks: Optional[Dict[str, Union[PathItem, Reference]]] = None - components: Optional[Components] = None - security: Optional[List[Dict[str, List[str]]]] = None - tags: Optional[List[Tag]] = None - externalDocs: Optional[ExternalDocumentation] = None - - -_model_rebuild(Schema) -_model_rebuild(Operation) -_model_rebuild(Encoding) diff --git a/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py b/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py deleted file mode 100644 index 808646cc..00000000 --- 
a/venv/lib/python3.12/site-packages/fastapi/openapi/utils.py +++ /dev/null @@ -1,569 +0,0 @@ -import http.client -import inspect -import warnings -from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast - -from fastapi import routing -from fastapi._compat import ( - GenerateJsonSchema, - JsonSchemaValue, - ModelField, - Undefined, - get_compat_model_name_map, - get_definitions, - get_schema_from_model_field, - lenient_issubclass, -) -from fastapi.datastructures import DefaultPlaceholder -from fastapi.dependencies.models import Dependant -from fastapi.dependencies.utils import ( - _get_flat_fields_from_params, - get_flat_dependant, - get_flat_params, -) -from fastapi.encoders import jsonable_encoder -from fastapi.openapi.constants import METHODS_WITH_BODY, REF_PREFIX, REF_TEMPLATE -from fastapi.openapi.models import OpenAPI -from fastapi.params import Body, ParamTypes -from fastapi.responses import Response -from fastapi.types import ModelNameMap -from fastapi.utils import ( - deep_dict_update, - generate_operation_id_for_path, - is_body_allowed_for_status_code, -) -from pydantic import BaseModel -from starlette.responses import JSONResponse -from starlette.routing import BaseRoute -from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY -from typing_extensions import Literal - -validation_error_definition = { - "title": "ValidationError", - "type": "object", - "properties": { - "loc": { - "title": "Location", - "type": "array", - "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, - }, - "msg": {"title": "Message", "type": "string"}, - "type": {"title": "Error Type", "type": "string"}, - }, - "required": ["loc", "msg", "type"], -} - -validation_error_response_definition = { - "title": "HTTPValidationError", - "type": "object", - "properties": { - "detail": { - "title": "Detail", - "type": "array", - "items": {"$ref": REF_PREFIX + "ValidationError"}, - } - }, -} - -status_code_ranges: Dict[str, str] = { - "1XX": "Information", - "2XX": "Success", - "3XX": "Redirection", - "4XX": "Client Error", - "5XX": "Server Error", - "DEFAULT": "Default Response", -} - - -def get_openapi_security_definitions( - flat_dependant: Dependant, -) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: - security_definitions = {} - operation_security = [] - for security_requirement in flat_dependant.security_requirements: - security_definition = jsonable_encoder( - security_requirement.security_scheme.model, - by_alias=True, - exclude_none=True, - ) - security_name = security_requirement.security_scheme.scheme_name - security_definitions[security_name] = security_definition - operation_security.append({security_name: security_requirement.scopes}) - return security_definitions, operation_security - - -def _get_openapi_operation_parameters( - *, - dependant: Dependant, - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> List[Dict[str, Any]]: - parameters = [] - flat_dependant = get_flat_dependant(dependant, skip_repeats=True) - path_params = _get_flat_fields_from_params(flat_dependant.path_params) - query_params = _get_flat_fields_from_params(flat_dependant.query_params) - header_params = _get_flat_fields_from_params(flat_dependant.header_params) - cookie_params = _get_flat_fields_from_params(flat_dependant.cookie_params) - parameter_groups = [ - (ParamTypes.path, path_params), - (ParamTypes.query, 
query_params), - (ParamTypes.header, header_params), - (ParamTypes.cookie, cookie_params), - ] - default_convert_underscores = True - if len(flat_dependant.header_params) == 1: - first_field = flat_dependant.header_params[0] - if lenient_issubclass(first_field.type_, BaseModel): - default_convert_underscores = getattr( - first_field.field_info, "convert_underscores", True - ) - for param_type, param_group in parameter_groups: - for param in param_group: - field_info = param.field_info - # field_info = cast(Param, field_info) - if not getattr(field_info, "include_in_schema", True): - continue - param_schema = get_schema_from_model_field( - field=param, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - name = param.alias - convert_underscores = getattr( - param.field_info, - "convert_underscores", - default_convert_underscores, - ) - if ( - param_type == ParamTypes.header - and param.alias == param.name - and convert_underscores - ): - name = param.name.replace("_", "-") - - parameter = { - "name": name, - "in": param_type.value, - "required": param.required, - "schema": param_schema, - } - if field_info.description: - parameter["description"] = field_info.description - openapi_examples = getattr(field_info, "openapi_examples", None) - example = getattr(field_info, "example", None) - if openapi_examples: - parameter["examples"] = jsonable_encoder(openapi_examples) - elif example != Undefined: - parameter["example"] = jsonable_encoder(example) - if getattr(field_info, "deprecated", None): - parameter["deprecated"] = True - parameters.append(parameter) - return parameters - - -def get_openapi_operation_request_body( - *, - body_field: Optional[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> Optional[Dict[str, Any]]: - if not body_field: - return None - assert isinstance(body_field, ModelField) - body_schema = get_schema_from_model_field( - field=body_field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - field_info = cast(Body, body_field.field_info) - request_media_type = field_info.media_type - required = body_field.required - request_body_oai: Dict[str, Any] = {} - if required: - request_body_oai["required"] = required - request_media_content: Dict[str, Any] = {"schema": body_schema} - if field_info.openapi_examples: - request_media_content["examples"] = jsonable_encoder( - field_info.openapi_examples - ) - elif field_info.example != Undefined: - request_media_content["example"] = jsonable_encoder(field_info.example) - request_body_oai["content"] = {request_media_type: request_media_content} - return request_body_oai - - -def generate_operation_id( - *, route: routing.APIRoute, method: str -) -> str: # pragma: nocover - warnings.warn( - "fastapi.openapi.utils.generate_operation_id() was deprecated, " - "it is not used internally, and will be removed soon", - DeprecationWarning, - stacklevel=2, - ) - if route.operation_id: - return route.operation_id - path: str = route.path_format - return generate_operation_id_for_path(name=route.name, path=path, method=method) - - -def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: - if 
route.summary: - return route.summary - return route.name.replace("_", " ").title() - - -def get_openapi_operation_metadata( - *, route: routing.APIRoute, method: str, operation_ids: Set[str] -) -> Dict[str, Any]: - operation: Dict[str, Any] = {} - if route.tags: - operation["tags"] = route.tags - operation["summary"] = generate_operation_summary(route=route, method=method) - if route.description: - operation["description"] = route.description - operation_id = route.operation_id or route.unique_id - if operation_id in operation_ids: - message = ( - f"Duplicate Operation ID {operation_id} for function " - + f"{route.endpoint.__name__}" - ) - file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") - if file_name: - message += f" at {file_name}" - warnings.warn(message, stacklevel=1) - operation_ids.add(operation_id) - operation["operationId"] = operation_id - if route.deprecated: - operation["deprecated"] = route.deprecated - return operation - - -def get_openapi_path( - *, - route: routing.APIRoute, - operation_ids: Set[str], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]: - path = {} - security_schemes: Dict[str, Any] = {} - definitions: Dict[str, Any] = {} - assert route.methods is not None, "Methods must be a list" - if isinstance(route.response_class, DefaultPlaceholder): - current_response_class: Type[Response] = route.response_class.value - else: - current_response_class = route.response_class - assert current_response_class, "A response class is needed to generate OpenAPI" - route_response_media_type: Optional[str] = current_response_class.media_type - if route.include_in_schema: - for method in route.methods: - operation = get_openapi_operation_metadata( - route=route, method=method, operation_ids=operation_ids - ) - parameters: List[Dict[str, Any]] = [] - flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) - security_definitions, operation_security = get_openapi_security_definitions( - flat_dependant=flat_dependant - ) - if operation_security: - operation.setdefault("security", []).extend(operation_security) - if security_definitions: - security_schemes.update(security_definitions) - operation_parameters = _get_openapi_operation_parameters( - dependant=route.dependant, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - parameters.extend(operation_parameters) - if parameters: - all_parameters = { - (param["in"], param["name"]): param for param in parameters - } - required_parameters = { - (param["in"], param["name"]): param - for param in parameters - if param.get("required") - } - # Make sure required definitions of the same parameter take precedence - # over non-required definitions - all_parameters.update(required_parameters) - operation["parameters"] = list(all_parameters.values()) - if method in METHODS_WITH_BODY: - request_body_oai = get_openapi_operation_request_body( - body_field=route.body_field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if request_body_oai: - operation["requestBody"] = request_body_oai - if route.callbacks: - callbacks = {} - for callback in 
route.callbacks: - if isinstance(callback, routing.APIRoute): - ( - cb_path, - cb_security_schemes, - cb_definitions, - ) = get_openapi_path( - route=callback, - operation_ids=operation_ids, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - callbacks[callback.name] = {callback.path: cb_path} - operation["callbacks"] = callbacks - if route.status_code is not None: - status_code = str(route.status_code) - else: - # It would probably make more sense for all response classes to have an - # explicit default status_code, and to extract it from them, instead of - # doing this inspection tricks, that would probably be in the future - # TODO: probably make status_code a default class attribute for all - # responses in Starlette - response_signature = inspect.signature(current_response_class.__init__) - status_code_param = response_signature.parameters.get("status_code") - if status_code_param is not None: - if isinstance(status_code_param.default, int): - status_code = str(status_code_param.default) - operation.setdefault("responses", {}).setdefault(status_code, {})[ - "description" - ] = route.response_description - if route_response_media_type and is_body_allowed_for_status_code( - route.status_code - ): - response_schema = {"type": "string"} - if lenient_issubclass(current_response_class, JSONResponse): - if route.response_field: - response_schema = get_schema_from_model_field( - field=route.response_field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - else: - response_schema = {} - operation.setdefault("responses", {}).setdefault( - status_code, {} - ).setdefault("content", {}).setdefault(route_response_media_type, {})[ - "schema" - ] = response_schema - if route.responses: - operation_responses = operation.setdefault("responses", {}) - for ( - additional_status_code, - additional_response, - ) in route.responses.items(): - process_response = additional_response.copy() - process_response.pop("model", None) - status_code_key = str(additional_status_code).upper() - if status_code_key == "DEFAULT": - status_code_key = "default" - openapi_response = operation_responses.setdefault( - status_code_key, {} - ) - assert isinstance(process_response, dict), ( - "An additional response must be a dict" - ) - field = route.response_fields.get(additional_status_code) - additional_field_schema: Optional[Dict[str, Any]] = None - if field: - additional_field_schema = get_schema_from_model_field( - field=field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - media_type = route_response_media_type or "application/json" - additional_schema = ( - process_response.setdefault("content", {}) - .setdefault(media_type, {}) - .setdefault("schema", {}) - ) - deep_dict_update(additional_schema, additional_field_schema) - status_text: Optional[str] = status_code_ranges.get( - str(additional_status_code).upper() - ) or http.client.responses.get(int(additional_status_code)) - description = ( - process_response.get("description") - or openapi_response.get("description") - or status_text - or "Additional Response" - ) - deep_dict_update(openapi_response, process_response) - openapi_response["description"] = description - http422 = str(HTTP_422_UNPROCESSABLE_ENTITY) - 
all_route_params = get_flat_params(route.dependant) - if (all_route_params or route.body_field) and not any( - status in operation["responses"] - for status in [http422, "4XX", "default"] - ): - operation["responses"][http422] = { - "description": "Validation Error", - "content": { - "application/json": { - "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} - } - }, - } - if "ValidationError" not in definitions: - definitions.update( - { - "ValidationError": validation_error_definition, - "HTTPValidationError": validation_error_response_definition, - } - ) - if route.openapi_extra: - deep_dict_update(operation, route.openapi_extra) - path[method.lower()] = operation - return path, security_schemes, definitions - - -def get_fields_from_routes( - routes: Sequence[BaseRoute], -) -> List[ModelField]: - body_fields_from_routes: List[ModelField] = [] - responses_from_routes: List[ModelField] = [] - request_fields_from_routes: List[ModelField] = [] - callback_flat_models: List[ModelField] = [] - for route in routes: - if getattr(route, "include_in_schema", None) and isinstance( - route, routing.APIRoute - ): - if route.body_field: - assert isinstance(route.body_field, ModelField), ( - "A request body must be a Pydantic Field" - ) - body_fields_from_routes.append(route.body_field) - if route.response_field: - responses_from_routes.append(route.response_field) - if route.response_fields: - responses_from_routes.extend(route.response_fields.values()) - if route.callbacks: - callback_flat_models.extend(get_fields_from_routes(route.callbacks)) - params = get_flat_params(route.dependant) - request_fields_from_routes.extend(params) - - flat_models = callback_flat_models + list( - body_fields_from_routes + responses_from_routes + request_fields_from_routes - ) - return flat_models - - -def get_openapi( - *, - title: str, - version: str, - openapi_version: str = "3.1.0", - summary: Optional[str] = None, - description: Optional[str] = None, - routes: Sequence[BaseRoute], - webhooks: Optional[Sequence[BaseRoute]] = None, - tags: Optional[List[Dict[str, Any]]] = None, - servers: Optional[List[Dict[str, Union[str, Any]]]] = None, - terms_of_service: Optional[str] = None, - contact: Optional[Dict[str, Union[str, Any]]] = None, - license_info: Optional[Dict[str, Union[str, Any]]] = None, - separate_input_output_schemas: bool = True, -) -> Dict[str, Any]: - info: Dict[str, Any] = {"title": title, "version": version} - if summary: - info["summary"] = summary - if description: - info["description"] = description - if terms_of_service: - info["termsOfService"] = terms_of_service - if contact: - info["contact"] = contact - if license_info: - info["license"] = license_info - output: Dict[str, Any] = {"openapi": openapi_version, "info": info} - if servers: - output["servers"] = servers - components: Dict[str, Dict[str, Any]] = {} - paths: Dict[str, Dict[str, Any]] = {} - webhook_paths: Dict[str, Dict[str, Any]] = {} - operation_ids: Set[str] = set() - all_fields = get_fields_from_routes(list(routes or []) + list(webhooks or [])) - model_name_map = get_compat_model_name_map(all_fields) - schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE) - field_mapping, definitions = get_definitions( - fields=all_fields, - schema_generator=schema_generator, - model_name_map=model_name_map, - separate_input_output_schemas=separate_input_output_schemas, - ) - for route in routes or []: - if isinstance(route, routing.APIRoute): - result = get_openapi_path( - route=route, - operation_ids=operation_ids, - 
schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if result: - path, security_schemes, path_definitions = result - if path: - paths.setdefault(route.path_format, {}).update(path) - if security_schemes: - components.setdefault("securitySchemes", {}).update( - security_schemes - ) - if path_definitions: - definitions.update(path_definitions) - for webhook in webhooks or []: - if isinstance(webhook, routing.APIRoute): - result = get_openapi_path( - route=webhook, - operation_ids=operation_ids, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if result: - path, security_schemes, path_definitions = result - if path: - webhook_paths.setdefault(webhook.path_format, {}).update(path) - if security_schemes: - components.setdefault("securitySchemes", {}).update( - security_schemes - ) - if path_definitions: - definitions.update(path_definitions) - if definitions: - components["schemas"] = {k: definitions[k] for k in sorted(definitions)} - if components: - output["components"] = components - output["paths"] = paths - if webhook_paths: - output["webhooks"] = webhook_paths - if tags: - output["tags"] = tags - return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/venv/lib/python3.12/site-packages/fastapi/param_functions.py b/venv/lib/python3.12/site-packages/fastapi/param_functions.py deleted file mode 100644 index b3621626..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/param_functions.py +++ /dev/null @@ -1,2360 +0,0 @@ -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -from fastapi import params -from fastapi._compat import Undefined -from fastapi.openapi.models import Example -from typing_extensions import Annotated, Doc, deprecated - -_Unset: Any = Undefined - - -def Path( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = ..., - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. 
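The `get_openapi()` function above is also the entry point for customizing the generated schema by hand. A minimal sketch following the documented caching pattern (the `x-maintainer` extension field is a hypothetical example):

```python
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi

app = FastAPI()


def custom_openapi():
    if app.openapi_schema:  # build and cache the schema only once
        return app.openapi_schema
    schema = get_openapi(title="My API", version="1.0.0", routes=app.routes)
    schema["info"]["x-maintainer"] = "platform-team"  # hypothetical extension field
    app.openapi_schema = schema
    return app.openapi_schema


app.openapi = custom_openapi
```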
The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). 
- """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. - """ - ), - ], -) -> Any: - """ - Declare a path parameter for a *path operation*. - - Read more about it in the - [FastAPI docs for Path Parameters and Numeric Validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). - - ```python - from typing import Annotated - - from fastapi import FastAPI, Path - - app = FastAPI() - - - @app.get("/items/{item_id}") - async def read_items( - item_id: Annotated[int, Path(title="The ID of the item to get")], - ): - return {"item_id": item_id} - ``` - """ - return params.Path( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Query( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. 
- """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). 
- """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. - """ - ), - ], -) -> Any: - return params.Query( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Header( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - convert_underscores: Annotated[ - bool, - Doc( - """ - Automatically convert underscores to hyphens in the parameter field name. - - Read more about it in the - [FastAPI docs for Header Parameters](https://fastapi.tiangolo.com/tutorial/header-params/#automatic-conversion) - """ - ), - ] = True, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. 
If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Header( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - convert_underscores=convert_underscores, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Cookie( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. 
- """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Cookie( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Body( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - embed: Annotated[ - Union[bool, None], - Doc( - """ - When `embed` is `True`, the parameter will be expected in a JSON body as a - key instead of being the JSON body itself. - - This happens automatically when more than one `Body` parameter is declared. - - Read more about it in the - [FastAPI docs for Body - Multiple Parameters](https://fastapi.tiangolo.com/tutorial/body-multiple-params/#embed-a-single-body-parameter). - """ - ), - ] = None, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "application/json", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. 
- """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Body( - default=default, - default_factory=default_factory, - embed=embed, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Form( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "application/x-www-form-urlencoded", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. 
- """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Form( - default=default, - default_factory=default_factory, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def File( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "multipart/form-data", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. 
- """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Union[deprecated, str, bool, None], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.File( - default=default, - default_factory=default_factory, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Depends( # noqa: N802 - dependency: Annotated[ - Optional[Callable[..., Any]], - Doc( - """ - A "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you, just pass the object - directly. - """ - ), - ] = None, - *, - use_cache: Annotated[ - bool, - Doc( - """ - By default, after a dependency is called the first time in a request, if - the dependency is declared again for the rest of the request (for example - if the dependency is needed by several dependencies), the value will be - re-used for the rest of the request. - - Set `use_cache` to `False` to disable this behavior and ensure the - dependency is called again (if declared more than once) in the same request. - """ - ), - ] = True, -) -> Any: - """ - Declare a FastAPI dependency. - - It takes a single "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you. - - Read more about it in the - [FastAPI docs for Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/). - - **Example** - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - - app = FastAPI() - - - async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100): - return {"q": q, "skip": skip, "limit": limit} - - - @app.get("/items/") - async def read_items(commons: Annotated[dict, Depends(common_parameters)]): - return commons - ``` - """ - return params.Depends(dependency=dependency, use_cache=use_cache) - - -def Security( # noqa: N802 - dependency: Annotated[ - Optional[Callable[..., Any]], - Doc( - """ - A "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you, just pass the object - directly. - """ - ), - ] = None, - *, - scopes: Annotated[ - Optional[Sequence[str]], - Doc( - """ - OAuth2 scopes required for the *path operation* that uses this Security - dependency. - - The term "scope" comes from the OAuth2 specification, it seems to be - intentionally vague and interpretable. It normally refers to permissions, - in cases to roles. - - These scopes are integrated with OpenAPI (and the API docs at `/docs`). - So they are visible in the OpenAPI specification. - ) - """ - ), - ] = None, - use_cache: Annotated[ - bool, - Doc( - """ - By default, after a dependency is called the first time in a request, if - the dependency is declared again for the rest of the request (for example - if the dependency is needed by several dependencies), the value will be - re-used for the rest of the request. - - Set `use_cache` to `False` to disable this behavior and ensure the - dependency is called again (if declared more than once) in the same request. - """ - ), - ] = True, -) -> Any: - """ - Declare a FastAPI Security dependency. 
- - The only difference with a regular dependency is that it can declare OAuth2 - scopes that will be integrated with OpenAPI and the automatic UI docs (by default - at `/docs`). - - It takes a single "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you. - - Read more about it in the - [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/) and - in the - [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). - - **Example** - - ```python - from typing import Annotated - - from fastapi import Security, FastAPI - - from .db import User - from .security import get_current_active_user - - app = FastAPI() - - @app.get("/users/me/items/") - async def read_own_items( - current_user: Annotated[User, Security(get_current_active_user, scopes=["items"])] - ): - return [{"item_id": "Foo", "owner": current_user.username}] - ``` - """ - return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/venv/lib/python3.12/site-packages/fastapi/params.py b/venv/lib/python3.12/site-packages/fastapi/params.py deleted file mode 100644 index 8f5601dd..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/params.py +++ /dev/null @@ -1,786 +0,0 @@ -import warnings -from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -from fastapi.openapi.models import Example -from pydantic.fields import FieldInfo -from typing_extensions import Annotated, deprecated - -from ._compat import ( - PYDANTIC_V2, - PYDANTIC_VERSION_MINOR_TUPLE, - Undefined, -) - -_Unset: Any = Undefined - - -class ParamTypes(Enum): - query = "query" - header = "header" - path = "path" - cookie = "cookie" - - -class Param(FieldInfo): - in_: ParamTypes - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
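The `Security()` docstring above already shows the common token-based pattern; as a complement, a sketch of how the `scopes` it declares reach a dependency through `SecurityScopes`. The dependency here is a stand-in that only surfaces the requested scopes, not a real token check:

```python
from typing import Annotated, List

from fastapi import FastAPI, Security
from fastapi.security import SecurityScopes

app = FastAPI()


def check_scopes(security_scopes: SecurityScopes) -> List[str]:
    # A real dependency would validate credentials here; this sketch only
    # exposes the scopes that `Security(...)` requested for the endpoint.
    return security_scopes.scopes


@app.get("/items/")
async def read_items(
    scopes: Annotated[List[str], Security(check_scopes, scopes=["items:read"])],
):
    return {"required_scopes": scopes}
```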
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - if example is not _Unset: - warnings.warn( - "`example` has been deprecated, please use `examples` instead", - category=DeprecationWarning, - stacklevel=4, - ) - self.example = example - self.include_in_schema = include_in_schema - self.openapi_examples = openapi_examples - kwargs = dict( - default=default, - default_factory=default_factory, - alias=alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - discriminator=discriminator, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - **extra, - ) - if examples is not None: - kwargs["examples"] = examples - if regex is not None: - warnings.warn( - "`regex` has been deprecated, please use `pattern` instead", - category=DeprecationWarning, - stacklevel=4, - ) - current_json_schema_extra = json_schema_extra or extra - if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7): - self.deprecated = deprecated - else: - kwargs["deprecated"] = deprecated - if PYDANTIC_V2: - kwargs.update( - { - "annotation": annotation, - "alias_priority": alias_priority, - "validation_alias": validation_alias, - "serialization_alias": serialization_alias, - "strict": strict, - "json_schema_extra": current_json_schema_extra, - } - ) - kwargs["pattern"] = pattern or regex - else: - kwargs["regex"] = pattern or regex - kwargs.update(**current_json_schema_extra) - use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} - - super().__init__(**use_kwargs) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.default})" - - -class Path(Param): - in_ = ParamTypes.path - - def __init__( - self, - default: Any = ..., - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - assert default is ..., "Path parameters cannot have a default value" - self.in_ = self.in_ - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Query(Param): - in_ = ParamTypes.query - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
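The `params.Path.__init__` above asserts that no default value is supplied, since path parameters are always required. A quick demonstration of that invariant:

```python
from fastapi import params

# Path parameters are always required, so supplying a default trips the
# assertion in `params.Path.__init__`.
try:
    params.Path(default=1)
except AssertionError as exc:
    print(exc)  # Path parameters cannot have a default value
```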
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Header(Param): - in_ = ParamTypes.header - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - convert_underscores: bool = True, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - self.convert_underscores = convert_underscores - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Cookie(Param): - in_ = ParamTypes.cookie - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Body(FieldInfo): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - embed: Union[bool, None] = None, - media_type: str = "application/json", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - self.embed = embed - self.media_type = media_type - if example is not _Unset: - warnings.warn( - "`example` has been deprecated, please use `examples` instead", - category=DeprecationWarning, - stacklevel=4, - ) - self.example = example - self.include_in_schema = include_in_schema - self.openapi_examples = openapi_examples - kwargs = dict( - default=default, - default_factory=default_factory, - alias=alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - discriminator=discriminator, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - **extra, - ) - if examples is not None: - kwargs["examples"] = examples - if regex is not None: - warnings.warn( - "`regex` has been deprecated, please use `pattern` instead", - category=DeprecationWarning, - stacklevel=4, - ) - current_json_schema_extra = json_schema_extra or extra - if PYDANTIC_VERSION_MINOR_TUPLE < (2, 7): - self.deprecated = deprecated - else: - kwargs["deprecated"] = deprecated - if PYDANTIC_V2: - kwargs.update( - { - "annotation": annotation, - "alias_priority": alias_priority, - "validation_alias": validation_alias, - "serialization_alias": serialization_alias, - "strict": strict, - "json_schema_extra": current_json_schema_extra, - } - ) - kwargs["pattern"] = pattern or regex - else: - kwargs["regex"] = pattern or regex - kwargs.update(**current_json_schema_extra) - - use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} - - super().__init__(**use_kwargs) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.default})" - - -class Form(Body): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - media_type: str = "application/x-www-form-urlencoded", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
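The `Param`/`Body` initializers above map the legacy `regex` argument onto `pattern` and emit a `DeprecationWarning` while doing so. A quick check of that shim (assuming a Pydantic v2 install):

```python
import warnings

from fastapi import Query

# Capture the DeprecationWarning the initializer emits for `regex`.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Query(default=None, regex="^fixedquery$")

print([str(w.message) for w in caught])
```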
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class File(Form): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - media_type: str = "multipart/form-data", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Union[deprecated, str, bool, None] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Depends: - def __init__( - self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True - ): - self.dependency = dependency - self.use_cache = use_cache - - def __repr__(self) -> str: - attr = getattr(self.dependency, "__name__", type(self.dependency).__name__) - cache = "" if self.use_cache else ", use_cache=False" - return f"{self.__class__.__name__}({attr}{cache})" - - -class Security(Depends): - def __init__( - self, - dependency: Optional[Callable[..., Any]] = None, - *, - scopes: Optional[Sequence[str]] = None, - use_cache: bool = True, - ): - super().__init__(dependency=dependency, use_cache=use_cache) - self.scopes = scopes or [] diff --git a/venv/lib/python3.12/site-packages/fastapi/py.typed b/venv/lib/python3.12/site-packages/fastapi/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/fastapi/requests.py b/venv/lib/python3.12/site-packages/fastapi/requests.py deleted file mode 100644 index d16552c0..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/requests.py +++ /dev/null @@ -1,2 +0,0 @@ -from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 -from starlette.requests import Request as Request # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/fastapi/responses.py b/venv/lib/python3.12/site-packages/fastapi/responses.py deleted file mode 100644 index 6c8db6f3..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/responses.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Any - -from starlette.responses import FileResponse as FileResponse # noqa -from starlette.responses import HTMLResponse as HTMLResponse # noqa -from starlette.responses import JSONResponse as JSONResponse # noqa -from starlette.responses import PlainTextResponse as PlainTextResponse # noqa -from starlette.responses import RedirectResponse as RedirectResponse # noqa -from starlette.responses import Response as Response # noqa -from starlette.responses import StreamingResponse as StreamingResponse # noqa - -try: - import ujson -except ImportError: # pragma: nocover - ujson = None # type: ignore - - -try: - import orjson -except ImportError: # pragma: nocover - orjson = None # type: ignore - - -class UJSONResponse(JSONResponse): - """ - JSON response using the high-performance ujson library to serialize data to JSON. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). 
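`Security` above is just `Depends` plus a `scopes` list that FastAPI's security utilities can inspect. A hedged sketch of how the two compose (the token scheme and dependency are illustrative, not part of this diff):

```python
from fastapi import Depends, FastAPI, Security
from fastapi.security import OAuth2PasswordBearer

app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


async def get_current_user(token: str = Depends(oauth2_scheme)):
    return {"token": token}


@app.get("/items/")
async def read_items(
    # Same caching semantics as Depends, with scopes recorded on the route.
    user: dict = Security(get_current_user, scopes=["items:read"]),
):
    return user
```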
- """ - - def render(self, content: Any) -> bytes: - assert ujson is not None, "ujson must be installed to use UJSONResponse" - return ujson.dumps(content, ensure_ascii=False).encode("utf-8") - - -class ORJSONResponse(JSONResponse): - """ - JSON response using the high-performance orjson library to serialize data to JSON. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). - """ - - def render(self, content: Any) -> bytes: - assert orjson is not None, "orjson must be installed to use ORJSONResponse" - return orjson.dumps( - content, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY - ) diff --git a/venv/lib/python3.12/site-packages/fastapi/routing.py b/venv/lib/python3.12/site-packages/fastapi/routing.py deleted file mode 100644 index 54c75a02..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/routing.py +++ /dev/null @@ -1,4440 +0,0 @@ -import asyncio -import dataclasses -import email.message -import inspect -import json -from contextlib import AsyncExitStack, asynccontextmanager -from enum import Enum, IntEnum -from typing import ( - Any, - AsyncIterator, - Callable, - Collection, - Coroutine, - Dict, - List, - Mapping, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, -) - -from fastapi import params -from fastapi._compat import ( - ModelField, - Undefined, - _get_model_config, - _model_dump, - _normalize_errors, - lenient_issubclass, -) -from fastapi.datastructures import Default, DefaultPlaceholder -from fastapi.dependencies.models import Dependant -from fastapi.dependencies.utils import ( - _should_embed_body_fields, - get_body_field, - get_dependant, - get_flat_dependant, - get_parameterless_sub_dependant, - get_typed_return_annotation, - solve_dependencies, -) -from fastapi.encoders import jsonable_encoder -from fastapi.exceptions import ( - FastAPIError, - RequestValidationError, - ResponseValidationError, - WebSocketRequestValidationError, -) -from fastapi.types import DecoratedCallable, IncEx -from fastapi.utils import ( - create_cloned_field, - create_model_field, - generate_unique_id, - get_value_or_default, - is_body_allowed_for_status_code, -) -from pydantic import BaseModel -from starlette import routing -from starlette.concurrency import run_in_threadpool -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.responses import JSONResponse, Response -from starlette.routing import ( - BaseRoute, - Match, - compile_path, - get_name, - request_response, - websocket_session, -) -from starlette.routing import Mount as Mount # noqa -from starlette.types import AppType, ASGIApp, Lifespan, Scope -from starlette.websockets import WebSocket -from typing_extensions import Annotated, Doc, deprecated - - -def _prepare_response_content( - res: Any, - *, - exclude_unset: bool, - exclude_defaults: bool = False, - exclude_none: bool = False, -) -> Any: - if isinstance(res, BaseModel): - read_with_orm_mode = getattr(_get_model_config(res), "read_with_orm_mode", None) - if read_with_orm_mode: - # Let from_orm extract the data from this model instead of converting - # it now to a dict. - # Otherwise, there's no way to extract lazy data that requires attribute - # access instead of dict iteration, e.g. lazy relationships. 
- return res - return _model_dump( - res, - by_alias=True, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - elif isinstance(res, list): - return [ - _prepare_response_content( - item, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - for item in res - ] - elif isinstance(res, dict): - return { - k: _prepare_response_content( - v, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - for k, v in res.items() - } - elif dataclasses.is_dataclass(res): - return dataclasses.asdict(res) - return res - - -def _merge_lifespan_context( - original_context: Lifespan[Any], nested_context: Lifespan[Any] -) -> Lifespan[Any]: - @asynccontextmanager - async def merged_lifespan( - app: AppType, - ) -> AsyncIterator[Optional[Mapping[str, Any]]]: - async with original_context(app) as maybe_original_state: - async with nested_context(app) as maybe_nested_state: - if maybe_nested_state is None and maybe_original_state is None: - yield None # old ASGI compatibility - else: - yield {**(maybe_nested_state or {}), **(maybe_original_state or {})} - - return merged_lifespan # type: ignore[return-value] - - -async def serialize_response( - *, - field: Optional[ModelField] = None, - response_content: Any, - include: Optional[IncEx] = None, - exclude: Optional[IncEx] = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - is_coroutine: bool = True, -) -> Any: - if field: - errors = [] - if not hasattr(field, "serialize"): - # pydantic v1 - response_content = _prepare_response_content( - response_content, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - if is_coroutine: - value, errors_ = field.validate(response_content, {}, loc=("response",)) - else: - value, errors_ = await run_in_threadpool( - field.validate, response_content, {}, loc=("response",) - ) - if isinstance(errors_, list): - errors.extend(errors_) - elif errors_: - errors.append(errors_) - if errors: - raise ResponseValidationError( - errors=_normalize_errors(errors), body=response_content - ) - - if hasattr(field, "serialize"): - return field.serialize( - value, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - return jsonable_encoder( - value, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - else: - return jsonable_encoder(response_content) - - -async def run_endpoint_function( - *, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool -) -> Any: - # Only called by get_request_handler. Has been split into its own function to - # facilitate profiling endpoints, since inner functions are harder to profile. 
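`_merge_lifespan_context` above nests the included router's lifespan inside the app's and merges their yielded state dicts, with the original (outer) state winning on key collisions. A standalone sketch of that idea, not FastAPI's public API:

```python
from contextlib import asynccontextmanager


def merge_lifespans(original, nested):
    @asynccontextmanager
    async def merged(app):
        async with original(app) as outer_state, nested(app) as inner_state:
            if outer_state is None and inner_state is None:
                yield None  # old-style ASGI lifespans yield no state
            else:
                # Outer (original) keys override nested ones, as above.
                yield {**(inner_state or {}), **(outer_state or {})}

    return merged
```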
- assert dependant.call is not None, "dependant.call must be a function" - - if is_coroutine: - return await dependant.call(**values) - else: - return await run_in_threadpool(dependant.call, **values) - - -def get_request_handler( - dependant: Dependant, - body_field: Optional[ModelField] = None, - status_code: Optional[int] = None, - response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse), - response_field: Optional[ModelField] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - dependency_overrides_provider: Optional[Any] = None, - embed_body_fields: bool = False, -) -> Callable[[Request], Coroutine[Any, Any, Response]]: - assert dependant.call is not None, "dependant.call must be a function" - is_coroutine = asyncio.iscoroutinefunction(dependant.call) - is_body_form = body_field and isinstance(body_field.field_info, params.Form) - if isinstance(response_class, DefaultPlaceholder): - actual_response_class: Type[Response] = response_class.value - else: - actual_response_class = response_class - - async def app(request: Request) -> Response: - response: Union[Response, None] = None - async with AsyncExitStack() as file_stack: - try: - body: Any = None - if body_field: - if is_body_form: - body = await request.form() - file_stack.push_async_callback(body.close) - else: - body_bytes = await request.body() - if body_bytes: - json_body: Any = Undefined - content_type_value = request.headers.get("content-type") - if not content_type_value: - json_body = await request.json() - else: - message = email.message.Message() - message["content-type"] = content_type_value - if message.get_content_maintype() == "application": - subtype = message.get_content_subtype() - if subtype == "json" or subtype.endswith("+json"): - json_body = await request.json() - if json_body != Undefined: - body = json_body - else: - body = body_bytes - except json.JSONDecodeError as e: - validation_error = RequestValidationError( - [ - { - "type": "json_invalid", - "loc": ("body", e.pos), - "msg": "JSON decode error", - "input": {}, - "ctx": {"error": e.msg}, - } - ], - body=e.doc, - ) - raise validation_error from e - except HTTPException: - # If a middleware raises an HTTPException, it should be raised again - raise - except Exception as e: - http_error = HTTPException( - status_code=400, detail="There was an error parsing the body" - ) - raise http_error from e - errors: List[Any] = [] - async with AsyncExitStack() as async_exit_stack: - solved_result = await solve_dependencies( - request=request, - dependant=dependant, - body=body, - dependency_overrides_provider=dependency_overrides_provider, - async_exit_stack=async_exit_stack, - embed_body_fields=embed_body_fields, - ) - errors = solved_result.errors - if not errors: - raw_response = await run_endpoint_function( - dependant=dependant, - values=solved_result.values, - is_coroutine=is_coroutine, - ) - if isinstance(raw_response, Response): - if raw_response.background is None: - raw_response.background = solved_result.background_tasks - response = raw_response - else: - response_args: Dict[str, Any] = { - "background": solved_result.background_tasks - } - # If status_code was set, use it, otherwise use the default from the - # response class, in the case of redirect it's 307 - current_status_code = ( - 
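The content-type check above reuses `email.message.Message` for robust header parsing: anything `application/json` or `application/*+json` is decoded as JSON, everything else stays raw bytes. A self-contained sketch of that predicate:

```python
import email.message


def body_is_json(content_type: str) -> bool:
    # Message() strips parameters such as "; charset=utf-8" for us.
    msg = email.message.Message()
    msg["content-type"] = content_type
    if msg.get_content_maintype() != "application":
        return False
    subtype = msg.get_content_subtype()
    return subtype == "json" or subtype.endswith("+json")


assert body_is_json("application/json")
assert body_is_json("application/vnd.api+json; charset=utf-8")
assert not body_is_json("text/plain")
```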
status_code - if status_code - else solved_result.response.status_code - ) - if current_status_code is not None: - response_args["status_code"] = current_status_code - if solved_result.response.status_code: - response_args["status_code"] = ( - solved_result.response.status_code - ) - content = await serialize_response( - field=response_field, - response_content=raw_response, - include=response_model_include, - exclude=response_model_exclude, - by_alias=response_model_by_alias, - exclude_unset=response_model_exclude_unset, - exclude_defaults=response_model_exclude_defaults, - exclude_none=response_model_exclude_none, - is_coroutine=is_coroutine, - ) - response = actual_response_class(content, **response_args) - if not is_body_allowed_for_status_code(response.status_code): - response.body = b"" - response.headers.raw.extend(solved_result.response.headers.raw) - if errors: - validation_error = RequestValidationError( - _normalize_errors(errors), body=body - ) - raise validation_error - if response is None: - raise FastAPIError( - "No response object was returned. There's a high chance that the " - "application code is raising an exception and a dependency with yield " - "has a block with a bare except, or a block with except Exception, " - "and is not raising the exception again. Read more about it in the " - "docs: https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-with-yield/#dependencies-with-yield-and-except" - ) - return response - - return app - - -def get_websocket_app( - dependant: Dependant, - dependency_overrides_provider: Optional[Any] = None, - embed_body_fields: bool = False, -) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: - async def app(websocket: WebSocket) -> None: - async with AsyncExitStack() as async_exit_stack: - # TODO: remove this scope later, after a few releases - # This scope fastapi_astack is no longer used by FastAPI, kept for - # compatibility, just in case - websocket.scope["fastapi_astack"] = async_exit_stack - solved_result = await solve_dependencies( - request=websocket, - dependant=dependant, - dependency_overrides_provider=dependency_overrides_provider, - async_exit_stack=async_exit_stack, - embed_body_fields=embed_body_fields, - ) - if solved_result.errors: - raise WebSocketRequestValidationError( - _normalize_errors(solved_result.errors) - ) - assert dependant.call is not None, "dependant.call must be a function" - await dependant.call(**solved_result.values) - - return app - - -class APIWebSocketRoute(routing.WebSocketRoute): - def __init__( - self, - path: str, - endpoint: Callable[..., Any], - *, - name: Optional[str] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - dependency_overrides_provider: Optional[Any] = None, - ) -> None: - self.path = path - self.endpoint = endpoint - self.name = get_name(endpoint) if name is None else name - self.dependencies = list(dependencies or []) - self.path_regex, self.path_format, self.param_convertors = compile_path(path) - self.dependant = get_dependant(path=self.path_format, call=self.endpoint) - for depends in self.dependencies[::-1]: - self.dependant.dependencies.insert( - 0, - get_parameterless_sub_dependant(depends=depends, path=self.path_format), - ) - self._flat_dependant = get_flat_dependant(self.dependant) - self._embed_body_fields = _should_embed_body_fields( - self._flat_dependant.body_params - ) - self.app = websocket_session( - get_websocket_app( - dependant=self.dependant, - dependency_overrides_provider=dependency_overrides_provider, - 
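The `FastAPIError` raised above most often points at a dependency with `yield` that swallows the endpoint's exception, leaving the handler with no response. A hedged sketch of the fix the error message describes (names are illustrative):

```python
from fastapi import Depends, FastAPI

app = FastAPI()


async def get_resource():
    resource = "connection"  # stand-in for a real resource
    try:
        yield resource
    except Exception:
        # ... cleanup ...
        raise  # re-raise so FastAPI can still produce an error response


@app.get("/boom")
async def boom(resource: str = Depends(get_resource)):
    raise RuntimeError("handler failed")
```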
embed_body_fields=self._embed_body_fields, - ) - ) - - def matches(self, scope: Scope) -> Tuple[Match, Scope]: - match, child_scope = super().matches(scope) - if match != Match.NONE: - child_scope["route"] = self - return match, child_scope - - -class APIRoute(routing.Route): - def __init__( - self, - path: str, - endpoint: Callable[..., Any], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - name: Optional[str] = None, - methods: Optional[Union[Set[str], List[str]]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - dependency_overrides_provider: Optional[Any] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Union[ - Callable[["APIRoute"], str], DefaultPlaceholder - ] = Default(generate_unique_id), - ) -> None: - self.path = path - self.endpoint = endpoint - if isinstance(response_model, DefaultPlaceholder): - return_annotation = get_typed_return_annotation(endpoint) - if lenient_issubclass(return_annotation, Response): - response_model = None - else: - response_model = return_annotation - self.response_model = response_model - self.summary = summary - self.response_description = response_description - self.deprecated = deprecated - self.operation_id = operation_id - self.response_model_include = response_model_include - self.response_model_exclude = response_model_exclude - self.response_model_by_alias = response_model_by_alias - self.response_model_exclude_unset = response_model_exclude_unset - self.response_model_exclude_defaults = response_model_exclude_defaults - self.response_model_exclude_none = response_model_exclude_none - self.include_in_schema = include_in_schema - self.response_class = response_class - self.dependency_overrides_provider = dependency_overrides_provider - self.callbacks = callbacks - self.openapi_extra = openapi_extra - self.generate_unique_id_function = generate_unique_id_function - self.tags = tags or [] - self.responses = responses or {} - self.name = get_name(endpoint) if name is None else name - self.path_regex, self.path_format, self.param_convertors = compile_path(path) - if methods is None: - methods = ["GET"] - self.methods: Set[str] = {method.upper() for method in methods} - if isinstance(generate_unique_id_function, DefaultPlaceholder): - current_generate_unique_id: Callable[[APIRoute], str] = ( - generate_unique_id_function.value - ) - else: - current_generate_unique_id = generate_unique_id_function - self.unique_id = self.operation_id or current_generate_unique_id(self) - # normalize enums e.g. 
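When `response_model` is left as the `Default(None)` placeholder, `APIRoute` above infers it from the endpoint's return annotation, unless that annotation is itself a `Response` subclass. A sketch:

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str


@app.get("/item")
async def read_item() -> Item:
    # The `-> Item` annotation becomes the response_model automatically.
    return Item(name="Empanada")
```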
http.HTTPStatus - if isinstance(status_code, IntEnum): - status_code = int(status_code) - self.status_code = status_code - if self.response_model: - assert is_body_allowed_for_status_code(status_code), ( - f"Status code {status_code} must not have a response body" - ) - response_name = "Response_" + self.unique_id - self.response_field = create_model_field( - name=response_name, - type_=self.response_model, - mode="serialization", - ) - # Create a clone of the field, so that a Pydantic submodel is not returned - # as is just because it's an instance of a subclass of a more limited class - # e.g. UserInDB (containing hashed_password) could be a subclass of User - # that doesn't have the hashed_password. But because it's a subclass, it - # would pass the validation and be returned as is. - # By being a new field, no inheritance will be passed as is. A new model - # will always be created. - # TODO: remove when deprecating Pydantic v1 - self.secure_cloned_response_field: Optional[ModelField] = ( - create_cloned_field(self.response_field) - ) - else: - self.response_field = None # type: ignore - self.secure_cloned_response_field = None - self.dependencies = list(dependencies or []) - self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") - # if a "form feed" character (page break) is found in the description text, - # truncate description text to the content preceding the first "form feed" - self.description = self.description.split("\f")[0].strip() - response_fields = {} - for additional_status_code, response in self.responses.items(): - assert isinstance(response, dict), "An additional response must be a dict" - model = response.get("model") - if model: - assert is_body_allowed_for_status_code(additional_status_code), ( - f"Status code {additional_status_code} must not have a response body" - ) - response_name = f"Response_{additional_status_code}_{self.unique_id}" - response_field = create_model_field( - name=response_name, type_=model, mode="serialization" - ) - response_fields[additional_status_code] = response_field - if response_fields: - self.response_fields: Dict[Union[int, str], ModelField] = response_fields - else: - self.response_fields = {} - - assert callable(endpoint), "An endpoint must be a callable" - self.dependant = get_dependant(path=self.path_format, call=self.endpoint) - for depends in self.dependencies[::-1]: - self.dependant.dependencies.insert( - 0, - get_parameterless_sub_dependant(depends=depends, path=self.path_format), - ) - self._flat_dependant = get_flat_dependant(self.dependant) - self._embed_body_fields = _should_embed_body_fields( - self._flat_dependant.body_params - ) - self.body_field = get_body_field( - flat_dependant=self._flat_dependant, - name=self.unique_id, - embed_body_fields=self._embed_body_fields, - ) - self.app = request_response(self.get_route_handler()) - - def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: - return get_request_handler( - dependant=self.dependant, - body_field=self.body_field, - status_code=self.status_code, - response_class=self.response_class, - response_field=self.secure_cloned_response_field, - response_model_include=self.response_model_include, - response_model_exclude=self.response_model_exclude, - response_model_by_alias=self.response_model_by_alias, - response_model_exclude_unset=self.response_model_exclude_unset, - response_model_exclude_defaults=self.response_model_exclude_defaults, - response_model_exclude_none=self.response_model_exclude_none, - 
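The form-feed truncation above is what lets a docstring carry private notes: only the text before `\f` reaches the OpenAPI description. A sketch:

```python
from fastapi import FastAPI

app = FastAPI()


@app.get("/items/")
async def read_items():
    """Public summary that appears in /docs.
    \f
    Everything after the form feed is stripped from the description.
    """
    return []
```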
dependency_overrides_provider=self.dependency_overrides_provider, - embed_body_fields=self._embed_body_fields, - ) - - def matches(self, scope: Scope) -> Tuple[Match, Scope]: - match, child_scope = super().matches(scope) - if match != Match.NONE: - child_scope["route"] = self - return match, child_scope - - -class APIRouter(routing.Router): - """ - `APIRouter` class, used to group *path operations*, for example to structure - an app in multiple files. It would then be included in the `FastAPI` app, or - in another `APIRouter` (ultimately included in the app). - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/). - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - - @router.get("/users/", tags=["users"]) - async def read_users(): - return [{"username": "Rick"}, {"username": "Morty"}] - - - app.include_router(router) - ``` - """ - - def __init__( - self, - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - """ - ), - ] = Default(JSONResponse), - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - OpenAPI callbacks that should apply to all *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - routes: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - **Note**: you probably shouldn't use this parameter, it is inherited - from Starlette and supported for compatibility. - - --- - - A list of routes to serve incoming HTTP and WebSocket requests. 
- """ - ), - deprecated( - """ - You normally wouldn't use this parameter with FastAPI, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation methods*, - like `router.get()`, `router.post()`, etc. - """ - ), - ] = None, - redirect_slashes: Annotated[ - bool, - Doc( - """ - Whether to detect and redirect slashes in URLs when the client doesn't - use the same format. - """ - ), - ] = True, - default: Annotated[ - Optional[ASGIApp], - Doc( - """ - Default function handler for this router. Used to handle - 404 Not Found errors. - """ - ), - ] = None, - dependency_overrides_provider: Annotated[ - Optional[Any], - Doc( - """ - Only used internally by FastAPI to handle dependency overrides. - - You shouldn't need to use it. It normally points to the `FastAPI` app - object. - """ - ), - ] = None, - route_class: Annotated[ - Type[APIRoute], - Doc( - """ - Custom route (*path operation*) class to be used by this router. - - Read more about it in the - [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router). - """ - ), - ] = APIRoute, - on_startup: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of startup event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - on_shutdown: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of shutdown event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - # the generic to Lifespan[AppType] is the type of the top level application - # which the router cannot know statically, so we use typing.Any - lifespan: Annotated[ - Optional[Lifespan[Any]], - Doc( - """ - A `Lifespan` context manager handler. This replaces `startup` and - `shutdown` functions with a single context manager. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) all the *path operations* in this router in the - generated OpenAPI. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
- """ - ), - ] = Default(generate_unique_id), - ) -> None: - super().__init__( - routes=routes, - redirect_slashes=redirect_slashes, - default=default, - on_startup=on_startup, - on_shutdown=on_shutdown, - lifespan=lifespan, - ) - if prefix: - assert prefix.startswith("/"), "A path prefix must start with '/'" - assert not prefix.endswith("/"), ( - "A path prefix must not end with '/', as the routes will start with '/'" - ) - self.prefix = prefix - self.tags: List[Union[str, Enum]] = tags or [] - self.dependencies = list(dependencies or []) - self.deprecated = deprecated - self.include_in_schema = include_in_schema - self.responses = responses or {} - self.callbacks = callbacks or [] - self.dependency_overrides_provider = dependency_overrides_provider - self.route_class = route_class - self.default_response_class = default_response_class - self.generate_unique_id_function = generate_unique_id_function - - def route( - self, - path: str, - methods: Optional[Collection[str]] = None, - name: Optional[str] = None, - include_in_schema: bool = True, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_route( - path, - func, - methods=methods, - name=name, - include_in_schema=include_in_schema, - ) - return func - - return decorator - - def add_api_route( - self, - path: str, - endpoint: Callable[..., Any], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[Union[Set[str], List[str]]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - name: Optional[str] = None, - route_class_override: Optional[Type[APIRoute]] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Union[ - Callable[[APIRoute], str], DefaultPlaceholder - ] = Default(generate_unique_id), - ) -> None: - route_class = route_class_override or self.route_class - responses = responses or {} - combined_responses = {**self.responses, **responses} - current_response_class = get_value_or_default( - response_class, self.default_response_class - ) - current_tags = self.tags.copy() - if tags: - current_tags.extend(tags) - current_dependencies = self.dependencies.copy() - if dependencies: - current_dependencies.extend(dependencies) - current_callbacks = self.callbacks.copy() - if callbacks: - current_callbacks.extend(callbacks) - current_generate_unique_id = get_value_or_default( - generate_unique_id_function, self.generate_unique_id_function - ) - route = route_class( - self.prefix + path, - endpoint=endpoint, - response_model=response_model, - status_code=status_code, - tags=current_tags, - dependencies=current_dependencies, - summary=summary, - description=description, - response_description=response_description, 
- responses=combined_responses, - deprecated=deprecated or self.deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema and self.include_in_schema, - response_class=current_response_class, - name=name, - dependency_overrides_provider=self.dependency_overrides_provider, - callbacks=current_callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=current_generate_unique_id, - ) - self.routes.append(route) - - def api_route( - self, - path: str, - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Type[Response] = Default(JSONResponse), - name: Optional[str] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[APIRoute], str] = Default( - generate_unique_id - ), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_route( - path, - func, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - return func - - return decorator - - def add_api_websocket_route( - self, - path: str, - endpoint: Callable[..., Any], - name: Optional[str] = None, - *, - dependencies: Optional[Sequence[params.Depends]] = None, - ) -> None: - current_dependencies = self.dependencies.copy() - if dependencies: - current_dependencies.extend(dependencies) - - route = APIWebSocketRoute( - self.prefix + path, - endpoint=endpoint, - name=name, - dependencies=current_dependencies, - dependency_overrides_provider=self.dependency_overrides_provider, - ) - self.routes.append(route) - - def websocket( - self, - path: Annotated[ - str, - Doc( - """ - 
WebSocket path. - """ - ), - ], - name: Annotated[ - Optional[str], - Doc( - """ - A name for the WebSocket. Only used internally. - """ - ), - ] = None, - *, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be used for this - WebSocket. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - """ - ), - ] = None, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Decorate a WebSocket function. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - - **Example** - - ## Example - - ```python - from fastapi import APIRouter, FastAPI, WebSocket - - app = FastAPI() - router = APIRouter() - - @router.websocket("/ws") - async def websocket_endpoint(websocket: WebSocket): - await websocket.accept() - while True: - data = await websocket.receive_text() - await websocket.send_text(f"Message text was: {data}") - - app.include_router(router) - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_websocket_route( - path, func, name=name, dependencies=dependencies - ) - return func - - return decorator - - def websocket_route( - self, path: str, name: Union[str, None] = None - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_websocket_route(path, func, name=name) - return func - - return decorator - - def include_router( - self, - router: Annotated["APIRouter", Doc("The `APIRouter` to include.")], - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - """ - ), - ] = Default(JSONResponse), - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). 
- """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - OpenAPI callbacks that should apply to all *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include (or not) all the *path operations* in this router in the - generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> None: - """ - Include another `APIRouter` in the same current `APIRouter`. - - Read more about it in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - internal_router = APIRouter() - users_router = APIRouter() - - @users_router.get("/users/") - def read_users(): - return [{"name": "Rick"}, {"name": "Morty"}] - - internal_router.include_router(users_router) - app.include_router(internal_router) - ``` - """ - if prefix: - assert prefix.startswith("/"), "A path prefix must start with '/'" - assert not prefix.endswith("/"), ( - "A path prefix must not end with '/', as the routes will start with '/'" - ) - else: - for r in router.routes: - path = getattr(r, "path") # noqa: B009 - name = getattr(r, "name", "unknown") - if path is not None and not path: - raise FastAPIError( - f"Prefix and path cannot be both empty (path operation: {name})" - ) - if responses is None: - responses = {} - for route in router.routes: - if isinstance(route, APIRoute): - combined_responses = {**responses, **route.responses} - use_response_class = get_value_or_default( - route.response_class, - router.default_response_class, - default_response_class, - self.default_response_class, - ) - current_tags = [] - if tags: - current_tags.extend(tags) - if route.tags: - current_tags.extend(route.tags) - current_dependencies: List[params.Depends] = [] - if dependencies: - current_dependencies.extend(dependencies) - if route.dependencies: - current_dependencies.extend(route.dependencies) - current_callbacks = [] - if callbacks: - current_callbacks.extend(callbacks) - if route.callbacks: - current_callbacks.extend(route.callbacks) - current_generate_unique_id = get_value_or_default( - route.generate_unique_id_function, - router.generate_unique_id_function, - generate_unique_id_function, - self.generate_unique_id_function, - ) - self.add_api_route( - prefix + route.path, - route.endpoint, - 
response_model=route.response_model, - status_code=route.status_code, - tags=current_tags, - dependencies=current_dependencies, - summary=route.summary, - description=route.description, - response_description=route.response_description, - responses=combined_responses, - deprecated=route.deprecated or deprecated or self.deprecated, - methods=route.methods, - operation_id=route.operation_id, - response_model_include=route.response_model_include, - response_model_exclude=route.response_model_exclude, - response_model_by_alias=route.response_model_by_alias, - response_model_exclude_unset=route.response_model_exclude_unset, - response_model_exclude_defaults=route.response_model_exclude_defaults, - response_model_exclude_none=route.response_model_exclude_none, - include_in_schema=route.include_in_schema - and self.include_in_schema - and include_in_schema, - response_class=use_response_class, - name=route.name, - route_class_override=type(route), - callbacks=current_callbacks, - openapi_extra=route.openapi_extra, - generate_unique_id_function=current_generate_unique_id, - ) - elif isinstance(route, routing.Route): - methods = list(route.methods or []) - self.add_route( - prefix + route.path, - route.endpoint, - methods=methods, - include_in_schema=route.include_in_schema, - name=route.name, - ) - elif isinstance(route, APIWebSocketRoute): - current_dependencies = [] - if dependencies: - current_dependencies.extend(dependencies) - if route.dependencies: - current_dependencies.extend(route.dependencies) - self.add_api_websocket_route( - prefix + route.path, - route.endpoint, - dependencies=current_dependencies, - name=route.name, - ) - elif isinstance(route, routing.WebSocketRoute): - self.add_websocket_route( - prefix + route.path, route.endpoint, name=route.name - ) - for handler in router.on_startup: - self.add_event_handler("startup", handler) - for handler in router.on_shutdown: - self.add_event_handler("shutdown", handler) - self.lifespan_context = _merge_lifespan_context( - self.lifespan_context, - router.lifespan_context, - ) - - def get( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). 
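`include_router` above re-registers each route with concatenated prefixes, tags, dependencies, and callbacks, so nesting composes. A sketch:

```python
from fastapi import APIRouter, FastAPI

users = APIRouter()


@users.get("/users/", tags=["users"])
async def read_users():
    return []


v1 = APIRouter()
v1.include_router(users, prefix="/v1", tags=["v1"])  # tags concatenate

app = FastAPI()
app.include_router(v1, prefix="/api")
# Final path: GET /api/v1/users/, tagged ["v1", "users"]
```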
- - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. 
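A sketch of `response_model_exclude_unset` as documented above (the model and values are illustrative):

```python
from typing import Optional

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):
    name: str
    description: Optional[str] = None
    tax: float = 10.5


@app.get("/items/{item_id}", response_model=Item, response_model_exclude_unset=True)
async def read_item(item_id: str):
    # Only "name" was set, so the unset defaults are omitted from the JSON.
    return {"name": "Foo"}
```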
- - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP GET operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.get("/items/") - def read_items(): - return [{"name": "Empanada"}, {"name": "Arepa"}] - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["GET"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def put( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). 
- * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. 
visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PUT operation. 
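-
- The parameters documented above compose freely. As a minimal,
- illustrative sketch (the `Item` model is assumed for the example),
- returning only the fields the client actually sent:
-
- ```python
- from typing import Optional
-
- from fastapi import APIRouter
- from pydantic import BaseModel
-
- router = APIRouter()
-
- class Item(BaseModel):
-     name: str
-     tax: Optional[float] = 10.5
-
- @router.put(
-     "/items/{item_id}",
-     response_model=Item,
-     response_model_exclude_unset=True,
- )
- def update_item(item_id: str, item: Item):
-     # Fields the client did not set are omitted from the response.
-     return item
- ```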
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.put("/items/{item_id}") - def replace_item(item_id: str, item: Item): - return {"message": "Item replaced", "id": item_id} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["PUT"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def post( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
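-
- For illustration, a hedged sketch (the router and `Item` model are
- assumed, not defined here):
-
- ```python
- from fastapi import APIRouter
- from pydantic import BaseModel
-
- router = APIRouter()
-
- class Item(BaseModel):
-     name: str
-
- @router.post("/items/", tags=["items"])
- def create_item(item: Item):
-     return item
- ```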
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
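-
- An illustrative sketch following the OpenAPI callback-expression
- pattern (the invoice payloads are left as plain `dict`s here):
-
- ```python
- from fastapi import APIRouter
-
- router = APIRouter()
- invoices_callback_router = APIRouter()
-
- # Documentation only: describes the request the API would send to the
- # client's callback URL; FastAPI never executes this route.
- @invoices_callback_router.post("{$callback_url}/invoices/{$request.body.id}")
- def invoice_notification(body: dict):
-     pass
-
- @router.post("/invoices/", callbacks=invoices_callback_router.routes)
- def create_invoice(invoice: dict):
-     return {"msg": "Invoice received"}
- ```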
- """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP POST operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.post("/items/") - def create_item(item: Item): - return {"message": "Item created"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["POST"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def delete( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. 
But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
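-
- For example (hypothetical route):
-
- ```python
- from fastapi import APIRouter
-
- router = APIRouter()
-
- @router.delete("/items/{item_id}", operation_id="delete_item_by_id")
- def delete_item(item_id: str):
-     return {"deleted": item_id}
- ```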
- """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). 
- """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP DELETE operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.delete("/items/{item_id}") - def delete_item(item_id: str): - return {"message": "Item deleted"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["DELETE"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def options( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. 
So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. 
- - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. 
- - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP OPTIONS operation. 
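-
- The `dependencies` parameter works here as on any other method; a
- hedged sketch (the token check is an assumption for the example):
-
- ```python
- from fastapi import APIRouter, Depends, Header, HTTPException
-
- router = APIRouter()
-
- def verify_token(x_token: str = Header(...)):
-     # Hypothetical check, for illustration only.
-     if x_token != "expected-token":
-         raise HTTPException(status_code=400, detail="Invalid X-Token")
-
- @router.options("/items/", dependencies=[Depends(verify_token)])
- def item_options():
-     return {"allow": ["GET", "POST"]}
- ```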
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.options("/items/") - def get_item_options(): - return {"additions": ["Aji", "Guacamole"]} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["OPTIONS"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def head( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
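-
- Tags can also be `Enum` members; an illustrative sketch:
-
- ```python
- from enum import Enum
-
- from fastapi import APIRouter
-
- router = APIRouter()
-
- class Tags(Enum):
-     items = "items"
-
- @router.head("/items/", tags=[Tags.items])
- def head_items():
-     return {}
- ```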
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
- """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP HEAD operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.head("/items/", status_code=204) - def get_items_headers(response: Response): - response.headers["X-Cat-Dog"] = "Alone in the world" - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["HEAD"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def patch( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. 
But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
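Both `operation_id` and `generate_unique_id_function` feed the same OpenAPI operation IDs. A sketch of a custom ID function, following the pattern from the linked Generate Clients docs (the tag-plus-name scheme is just one possible convention):

```python
from fastapi import FastAPI
from fastapi.routing import APIRoute


def custom_generate_unique_id(route: APIRoute) -> str:
    # Combine the first tag with the route name, e.g. "items-read_item",
    # so generated client method names stay short and readable.
    # Assumes every route declares at least one tag.
    return f"{route.tags[0]}-{route.name}"


app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
```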
-
-                """
-            ),
-        ] = None,
-        response_model_include: Annotated[
-            Optional[IncEx],
-            Doc(
-                """
-                Configuration passed to Pydantic to include only certain fields in the
-                response data.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = None,
-        response_model_exclude: Annotated[
-            Optional[IncEx],
-            Doc(
-                """
-                Configuration passed to Pydantic to exclude certain fields in the
-                response data.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = None,
-        response_model_by_alias: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response model
-                should be serialized by alias when an alias is used.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = True,
-        response_model_exclude_unset: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response data
-                should have all the fields, including the ones that were not set and
-                have their default values. This is different from
-                `response_model_exclude_defaults` in that if the fields are set,
-                they will be included in the response, even if the value is the same
-                as the default.
-
-                When `True`, fields that were not explicitly set are omitted from
-                the response, even if they hold their default values.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
-                """
-            ),
-        ] = False,
-        response_model_exclude_defaults: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response data
-                should have all the fields, including the ones that have the same value
-                as the default. This is different from `response_model_exclude_unset`
-                in that if the fields are set but contain the same default values,
-                they will be excluded from the response.
-
-                When `True`, default values are omitted from the response.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
-                """
-            ),
-        ] = False,
-        response_model_exclude_none: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response data should
-                exclude fields set to `None`.
-
-                This is much simpler (less smart) than `response_model_exclude_unset`
-                and `response_model_exclude_defaults`. You probably want to use one of
-                those two instead of this one, as those allow returning `None` values
-                when it makes sense.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
-                """
-            ),
-        ] = False,
-        include_in_schema: Annotated[
-            bool,
-            Doc(
-                """
-                Include this *path operation* in the generated OpenAPI schema.
-
-                This affects the generated OpenAPI (e.g. visible at `/docs`).
-
-                Read more about it in the
-                [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi).
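For instance, `include_in_schema=False` keeps a *path operation* callable while hiding it from `/docs` and `openapi.json`; a small sketch (the route itself is hypothetical):

```python
from fastapi import FastAPI

app = FastAPI()


# The endpoint still serves requests; it just never shows up in the
# generated OpenAPI schema or the interactive docs.
@app.patch("/internal/reindex", include_in_schema=False)
def reindex() -> dict:
    return {"status": "started"}
```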
- """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PATCH operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.patch("/items/") - def update_item(item: Item): - return {"message": "Item updated in place"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["PATCH"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def trace( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. 
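A common companion to the `patch` example above is the partial-update pattern from the FastAPI tutorial, where `exclude_unset=True` leaves unsent fields untouched. A sketch assuming Pydantic v2 (`model_dump`/`model_copy`) and an in-memory store:

```python
from fastapi import FastAPI
from pydantic import BaseModel


class Item(BaseModel):
    name: str | None = None
    description: str | None = None
    price: float | None = None


app = FastAPI()
items: dict[int, Item] = {1: Item(name="hammer", price=9.99)}


@app.patch("/items/{item_id}", response_model=Item)
def update_item(item_id: int, item: Item) -> Item:
    stored = items[item_id]
    # Only apply the fields the client actually sent in the request body.
    update_data = item.model_dump(exclude_unset=True)
    items[item_id] = stored.model_copy(update=update_data)
    return items[item_id]
```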
- - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
-
-                """
-            ),
-        ] = "Successful Response",
-        responses: Annotated[
-            Optional[Dict[Union[int, str], Dict[str, Any]]],
-            Doc(
-                """
-                Additional responses that could be returned by this *path operation*.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-                """
-            ),
-        ] = None,
-        deprecated: Annotated[
-            Optional[bool],
-            Doc(
-                """
-                Mark this *path operation* as deprecated.
-
-                It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-                """
-            ),
-        ] = None,
-        operation_id: Annotated[
-            Optional[str],
-            Doc(
-                """
-                Custom operation ID to be used by this *path operation*.
-
-                By default, it is generated automatically.
-
-                If you provide a custom operation ID, you need to make sure it is
-                unique for the whole API.
-
-                You can customize the
-                operation ID generation with the parameter
-                `generate_unique_id_function` in the `FastAPI` class.
-
-                Read more about it in the
-                [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
-                """
-            ),
-        ] = None,
-        response_model_include: Annotated[
-            Optional[IncEx],
-            Doc(
-                """
-                Configuration passed to Pydantic to include only certain fields in the
-                response data.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = None,
-        response_model_exclude: Annotated[
-            Optional[IncEx],
-            Doc(
-                """
-                Configuration passed to Pydantic to exclude certain fields in the
-                response data.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = None,
-        response_model_by_alias: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response model
-                should be serialized by alias when an alias is used.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
-                """
-            ),
-        ] = True,
-        response_model_exclude_unset: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response data
-                should have all the fields, including the ones that were not set and
-                have their default values. This is different from
-                `response_model_exclude_defaults` in that if the fields are set,
-                they will be included in the response, even if the value is the same
-                as the default.
-
-                When `True`, fields that were not explicitly set are omitted from
-                the response, even if they hold their default values.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
-                """
-            ),
-        ] = False,
-        response_model_exclude_defaults: Annotated[
-            bool,
-            Doc(
-                """
-                Configuration passed to Pydantic to define if the response data
-                should have all the fields, including the ones that have the same value
-                as the default. This is different from `response_model_exclude_unset`
-                in that if the fields are set but contain the same default values,
-                they will be excluded from the response.
-
-                When `True`, default values are omitted from the response.
-
-                Read more about it in the
-                [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-parameters-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP TRACE operation. 
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.trace("/items/{item_id}") - def trace_item(item_id: str): - return None - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["TRACE"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - @deprecated( - """ - on_event is deprecated, use lifespan event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). - """ - ) - def on_event( - self, - event_type: Annotated[ - str, - Doc( - """ - The type of event. `startup` or `shutdown`. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add an event handler for the router. - - `on_event` is deprecated, use `lifespan` event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). 
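As the deprecation note above says, `lifespan` handlers replace `on_event`. A minimal sketch of the equivalent startup/shutdown pairing (the print calls stand in for real setup and teardown):

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Everything before the yield runs once, before requests are served.
    print("starting up")
    yield
    # Everything after the yield runs once, on shutdown.
    print("shutting down")


app = FastAPI(lifespan=lifespan)
```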
- """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_event_handler(event_type, func) - return func - - return decorator diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__init__.py b/venv/lib/python3.12/site-packages/fastapi/security/__init__.py deleted file mode 100644 index 3aa6bf21..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .api_key import APIKeyCookie as APIKeyCookie -from .api_key import APIKeyHeader as APIKeyHeader -from .api_key import APIKeyQuery as APIKeyQuery -from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials -from .http import HTTPBasic as HTTPBasic -from .http import HTTPBasicCredentials as HTTPBasicCredentials -from .http import HTTPBearer as HTTPBearer -from .http import HTTPDigest as HTTPDigest -from .oauth2 import OAuth2 as OAuth2 -from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer -from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer -from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm -from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict -from .oauth2 import SecurityScopes as SecurityScopes -from .open_id_connect_url import OpenIdConnect as OpenIdConnect diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b75f404b..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc deleted file mode 100644 index cc0f9a1e..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/api_key.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc deleted file mode 100644 index df22a64f..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc deleted file mode 100644 index b390cbff..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/http.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc deleted file mode 100644 index e2c7c662..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/oauth2.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc deleted file mode 100644 index c66b366d..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/open_id_connect_url.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 3052f078..00000000 Binary files a/venv/lib/python3.12/site-packages/fastapi/security/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/fastapi/security/api_key.py b/venv/lib/python3.12/site-packages/fastapi/security/api_key.py deleted file mode 100644 index 70c2dca8..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/api_key.py +++ /dev/null @@ -1,288 +0,0 @@ -from typing import Optional - -from fastapi.openapi.models import APIKey, APIKeyIn -from fastapi.security.base import SecurityBase -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.status import HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc - - -class APIKeyBase(SecurityBase): - @staticmethod - def check_api_key(api_key: Optional[str], auto_error: bool) -> Optional[str]: - if not api_key: - if auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - return None - return api_key - - -class APIKeyQuery(APIKeyBase): - """ - API key authentication using a query parameter. - - This defines the name of the query parameter that should be provided in the request - with the API key and integrates that into the OpenAPI documentation. It extracts - the key value sent in the query parameter automatically and provides it as the - dependency result. But it doesn't define how to send that API key to the client. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. - - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyQuery - - app = FastAPI() - - query_scheme = APIKeyQuery(name="api_key") - - - @app.get("/items/") - async def read_items(api_key: str = Depends(query_scheme)): - return {"api_key": api_key} - ``` - """ - - def __init__( - self, - *, - name: Annotated[ - str, - Doc("Query parameter name."), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the query parameter is not provided, `APIKeyQuery` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the query parameter is not - available, instead of erroring out, the dependency result will be - `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a query - parameter or in an HTTP Bearer token). 
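The `auto_error=False` behavior described above is the building block for optional authentication. A sketch where an anonymous request still succeeds (the endpoint and payloads are illustrative):

```python
from typing import Optional

from fastapi import Depends, FastAPI
from fastapi.security import APIKeyQuery

app = FastAPI()
query_scheme = APIKeyQuery(name="api_key", auto_error=False)


@app.get("/items/")
async def read_items(api_key: Optional[str] = Depends(query_scheme)):
    # With auto_error=False the dependency yields None instead of a 403,
    # so the same endpoint can serve anonymous and authenticated callers.
    if api_key is None:
        return {"items": "public subset"}
    return {"items": "full catalog"}
```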
- """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.query}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.query_params.get(self.model.name) - return self.check_api_key(api_key, self.auto_error) - - -class APIKeyHeader(APIKeyBase): - """ - API key authentication using a header. - - This defines the name of the header that should be provided in the request with - the API key and integrates that into the OpenAPI documentation. It extracts - the key value sent in the header automatically and provides it as the dependency - result. But it doesn't define how to send that key to the client. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. - - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyHeader - - app = FastAPI() - - header_scheme = APIKeyHeader(name="x-key") - - - @app.get("/items/") - async def read_items(key: str = Depends(header_scheme)): - return {"key": key} - ``` - """ - - def __init__( - self, - *, - name: Annotated[str, Doc("Header name.")], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the header is not provided, `APIKeyHeader` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the header is not available, - instead of erroring out, the dependency result will be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a header or - in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.header}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.headers.get(self.model.name) - return self.check_api_key(api_key, self.auto_error) - - -class APIKeyCookie(APIKeyBase): - """ - API key authentication using a cookie. - - This defines the name of the cookie that should be provided in the request with - the API key and integrates that into the OpenAPI documentation. It extracts - the key value sent in the cookie automatically and provides it as the dependency - result. But it doesn't define how to set that cookie. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. 
- - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyCookie - - app = FastAPI() - - cookie_scheme = APIKeyCookie(name="session") - - - @app.get("/items/") - async def read_items(session: str = Depends(cookie_scheme)): - return {"session": session} - ``` - """ - - def __init__( - self, - *, - name: Annotated[str, Doc("Cookie name.")], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the cookie is not provided, `APIKeyCookie` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the cookie is not available, - instead of erroring out, the dependency result will be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a cookie or - in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.cookie}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.cookies.get(self.model.name) - return self.check_api_key(api_key, self.auto_error) diff --git a/venv/lib/python3.12/site-packages/fastapi/security/base.py b/venv/lib/python3.12/site-packages/fastapi/security/base.py deleted file mode 100644 index c43555de..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/base.py +++ /dev/null @@ -1,6 +0,0 @@ -from fastapi.openapi.models import SecurityBase as SecurityBaseModel - - -class SecurityBase: - model: SecurityBaseModel - scheme_name: str diff --git a/venv/lib/python3.12/site-packages/fastapi/security/http.py b/venv/lib/python3.12/site-packages/fastapi/security/http.py deleted file mode 100644 index 9ab2df3c..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/http.py +++ /dev/null @@ -1,423 +0,0 @@ -import binascii -from base64 import b64decode -from typing import Optional - -from fastapi.exceptions import HTTPException -from fastapi.openapi.models import HTTPBase as HTTPBaseModel -from fastapi.openapi.models import HTTPBearer as HTTPBearerModel -from fastapi.security.base import SecurityBase -from fastapi.security.utils import get_authorization_scheme_param -from pydantic import BaseModel -from starlette.requests import Request -from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc - - -class HTTPBasicCredentials(BaseModel): - """ - The HTTP Basic credentials given as the result of using `HTTPBasic` in a - dependency. - - Read more about it in the - [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). - """ - - username: Annotated[str, Doc("The HTTP Basic username.")] - password: Annotated[str, Doc("The HTTP Basic password.")] - - -class HTTPAuthorizationCredentials(BaseModel): - """ - The HTTP authorization credentials in the result of using `HTTPBearer` or - `HTTPDigest` in a dependency. 
- - The HTTP authorization header value is split by the first space. - - The first part is the `scheme`, the second part is the `credentials`. - - For example, in an HTTP Bearer token scheme, the client will send a header - like: - - ``` - Authorization: Bearer deadbeef12346 - ``` - - In this case: - - * `scheme` will have the value `"Bearer"` - * `credentials` will have the value `"deadbeef12346"` - """ - - scheme: Annotated[ - str, - Doc( - """ - The HTTP authorization scheme extracted from the header value. - """ - ), - ] - credentials: Annotated[ - str, - Doc( - """ - The HTTP authorization credentials extracted from the header value. - """ - ), - ] - - -class HTTPBase(SecurityBase): - def __init__( - self, - *, - scheme: str, - scheme_name: Optional[str] = None, - description: Optional[str] = None, - auto_error: bool = True, - ): - self.model = HTTPBaseModel(scheme=scheme, description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__( - self, request: Request - ) -> Optional[HTTPAuthorizationCredentials]: - authorization = request.headers.get("Authorization") - scheme, credentials = get_authorization_scheme_param(authorization) - if not (authorization and scheme and credentials): - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) - - -class HTTPBasic(HTTPBase): - """ - HTTP Basic authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPBasicCredentials` object containing the - `username` and the `password`. - - Read more about it in the - [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPBasic, HTTPBasicCredentials - - app = FastAPI() - - security = HTTPBasic() - - - @app.get("/users/me") - def read_current_user(credentials: Annotated[HTTPBasicCredentials, Depends(security)]): - return {"username": credentials.username, "password": credentials.password} - ``` - """ - - def __init__( - self, - *, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - realm: Annotated[ - Optional[str], - Doc( - """ - HTTP Basic authentication realm. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Basic authentication is not provided (a - header), `HTTPBasic` will automatically cancel the request and send the - client an error. - - If `auto_error` is set to `False`, when the HTTP Basic authentication - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in HTTP Basic - authentication or in an HTTP Bearer token). 
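Once `HTTPBasic` has extracted the credentials, the linked HTTP Basic Auth docs recommend a constant-time comparison when checking them. A sketch with hard-coded expected values (placeholders, not a real user store):

```python
import secrets
from typing import Annotated

from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.security import HTTPBasic, HTTPBasicCredentials

app = FastAPI()
security = HTTPBasic()


@app.get("/users/me")
def read_current_user(
    credentials: Annotated[HTTPBasicCredentials, Depends(security)],
):
    # compare_digest avoids leaking information through timing differences.
    ok_user = secrets.compare_digest(credentials.username.encode(), b"stanleyjobson")
    ok_pass = secrets.compare_digest(credentials.password.encode(), b"swordfish")
    if not (ok_user and ok_pass):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Basic"},
        )
    return {"username": credentials.username}
```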
- """ - ), - ] = True, - ): - self.model = HTTPBaseModel(scheme="basic", description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.realm = realm - self.auto_error = auto_error - - async def __call__( # type: ignore - self, request: Request - ) -> Optional[HTTPBasicCredentials]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if self.realm: - unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'} - else: - unauthorized_headers = {"WWW-Authenticate": "Basic"} - if not authorization or scheme.lower() != "basic": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers=unauthorized_headers, - ) - else: - return None - invalid_user_credentials_exc = HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Invalid authentication credentials", - headers=unauthorized_headers, - ) - try: - data = b64decode(param).decode("ascii") - except (ValueError, UnicodeDecodeError, binascii.Error): - raise invalid_user_credentials_exc # noqa: B904 - username, separator, password = data.partition(":") - if not separator: - raise invalid_user_credentials_exc - return HTTPBasicCredentials(username=username, password=password) - - -class HTTPBearer(HTTPBase): - """ - HTTP Bearer token authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPAuthorizationCredentials` object containing - the `scheme` and the `credentials`. - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer - - app = FastAPI() - - security = HTTPBearer() - - - @app.get("/users/me") - def read_current_user( - credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] - ): - return {"scheme": credentials.scheme, "credentials": credentials.credentials} - ``` - """ - - def __init__( - self, - *, - bearerFormat: Annotated[Optional[str], Doc("Bearer token format.")] = None, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Bearer token is not provided (in an - `Authorization` header), `HTTPBearer` will automatically cancel the - request and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Bearer token - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in an HTTP - Bearer token or in a cookie). 
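`HTTPBearer` only extracts the token; validating it is up to the application. A sketch assuming PyJWT and a shared HS256 secret (both are illustrative choices, not implied by the class):

```python
from typing import Annotated

import jwt  # PyJWT, assumed installed: pip install pyjwt
from fastapi import Depends, FastAPI, HTTPException
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

app = FastAPI()
security = HTTPBearer()
SECRET_KEY = "change-me"  # placeholder; load from configuration in practice


@app.get("/users/me")
def read_current_user(
    credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)],
):
    try:
        payload = jwt.decode(credentials.credentials, SECRET_KEY, algorithms=["HS256"])
    except jwt.PyJWTError:
        raise HTTPException(status_code=401, detail="Invalid token")
    return {"sub": payload.get("sub")}
```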
- """ - ), - ] = True, - ): - self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__( - self, request: Request - ) -> Optional[HTTPAuthorizationCredentials]: - authorization = request.headers.get("Authorization") - scheme, credentials = get_authorization_scheme_param(authorization) - if not (authorization and scheme and credentials): - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - if scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, - detail="Invalid authentication credentials", - ) - else: - return None - return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) - - -class HTTPDigest(HTTPBase): - """ - HTTP Digest authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPAuthorizationCredentials` object containing - the `scheme` and the `credentials`. - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPAuthorizationCredentials, HTTPDigest - - app = FastAPI() - - security = HTTPDigest() - - - @app.get("/users/me") - def read_current_user( - credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] - ): - return {"scheme": credentials.scheme, "credentials": credentials.credentials} - ``` - """ - - def __init__( - self, - *, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Digest is not provided, `HTTPDigest` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Digest is not - available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in HTTP - Digest or in a cookie). 
-                """
-            ),
-        ] = True,
-    ):
-        self.model = HTTPBaseModel(scheme="digest", description=description)
-        self.scheme_name = scheme_name or self.__class__.__name__
-        self.auto_error = auto_error
-
-    async def __call__(
-        self, request: Request
-    ) -> Optional[HTTPAuthorizationCredentials]:
-        authorization = request.headers.get("Authorization")
-        scheme, credentials = get_authorization_scheme_param(authorization)
-        if not (authorization and scheme and credentials):
-            if self.auto_error:
-                raise HTTPException(
-                    status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
-                )
-            else:
-                return None
-        if scheme.lower() != "digest":
-            if self.auto_error:
-                raise HTTPException(
-                    status_code=HTTP_403_FORBIDDEN,
-                    detail="Invalid authentication credentials",
-                )
-            else:
-                return None
-        return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)
diff --git a/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py b/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py
deleted file mode 100644
index 88e394db..00000000
--- a/venv/lib/python3.12/site-packages/fastapi/security/oauth2.py
+++ /dev/null
@@ -1,653 +0,0 @@
-from typing import Any, Dict, List, Optional, Union, cast
-
-from fastapi.exceptions import HTTPException
-from fastapi.openapi.models import OAuth2 as OAuth2Model
-from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
-from fastapi.param_functions import Form
-from fastapi.security.base import SecurityBase
-from fastapi.security.utils import get_authorization_scheme_param
-from starlette.requests import Request
-from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
-
-# TODO: import from typing when deprecating Python 3.9
-from typing_extensions import Annotated, Doc
-
-
-class OAuth2PasswordRequestForm:
-    """
-    This is a dependency class to collect the `username` and `password` as form data
-    for an OAuth2 password flow.
-
-    The OAuth2 specification dictates that for a password flow the data should be
-    collected using form data (instead of JSON) and that it should have the specific
-    fields `username` and `password`.
-
-    All the initialization parameters are extracted from the request.
-
-    Read more about it in the
-    [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/).
-
-    ## Example
-
-    ```python
-    from typing import Annotated
-
-    from fastapi import Depends, FastAPI
-    from fastapi.security import OAuth2PasswordRequestForm
-
-    app = FastAPI()
-
-
-    @app.post("/login")
-    def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]):
-        data = {}
-        data["scopes"] = []
-        for scope in form_data.scopes:
-            data["scopes"].append(scope)
-        if form_data.client_id:
-            data["client_id"] = form_data.client_id
-        if form_data.client_secret:
-            data["client_secret"] = form_data.client_secret
-        return data
-    ```
-
-    Note that for OAuth2 the scope `items:read` is a single scope in an opaque string.
-    You could have custom internal logic to separate it by colon characters (`:`) or
-    similar, and get the two parts `items` and `read`. Many applications do that to
-    group and organize permissions, you could do it as well in your application, just
-    know that it is application specific, it's not part of the specification.
-    """
-
-    def __init__(
-        self,
-        *,
-        grant_type: Annotated[
-            Union[str, None],
-            Form(pattern="^password$"),
-            Doc(
-                """
-                The OAuth2 spec says it is required and MUST be the fixed string
-                "password".
Nevertheless, this dependency class is permissive and
-                allows not passing it. If you want to enforce it, use instead the
-                `OAuth2PasswordRequestFormStrict` dependency.
-                """
-            ),
-        ] = None,
-        username: Annotated[
-            str,
-            Form(),
-            Doc(
-                """
-                `username` string. The OAuth2 spec requires the exact field name
-                `username`.
-                """
-            ),
-        ],
-        password: Annotated[
-            str,
-            Form(json_schema_extra={"format": "password"}),
-            Doc(
-                """
-                `password` string. The OAuth2 spec requires the exact field name
-                `password`.
-                """
-            ),
-        ],
-        scope: Annotated[
-            str,
-            Form(),
-            Doc(
-                """
-                A single string with actually several scopes separated by spaces. Each
-                scope is also a string.
-
-                For example, a single string with:
-
-                ```python
-                "items:read items:write users:read profile openid"
-                ```
-
-                would represent the scopes:
-
-                * `items:read`
-                * `items:write`
-                * `users:read`
-                * `profile`
-                * `openid`
-                """
-            ),
-        ] = "",
-        client_id: Annotated[
-            Union[str, None],
-            Form(),
-            Doc(
-                """
-                If there's a `client_id`, it can be sent as part of the form fields.
-                But the OAuth2 specification recommends sending the `client_id` and
-                `client_secret` (if any) using HTTP Basic auth.
-                """
-            ),
-        ] = None,
-        client_secret: Annotated[
-            Union[str, None],
-            Form(json_schema_extra={"format": "password"}),
-            Doc(
-                """
-                If there's a `client_password` (and a `client_id`), they can be sent
-                as part of the form fields. But the OAuth2 specification recommends
-                sending the `client_id` and `client_secret` (if any) using HTTP Basic
-                auth.
-                """
-            ),
-        ] = None,
-    ):
-        self.grant_type = grant_type
-        self.username = username
-        self.password = password
-        self.scopes = scope.split()
-        self.client_id = client_id
-        self.client_secret = client_secret
-
-
-class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm):
-    """
-    This is a dependency class to collect the `username` and `password` as form data
-    for an OAuth2 password flow.
-
-    The OAuth2 specification dictates that for a password flow the data should be
-    collected using form data (instead of JSON) and that it should have the specific
-    fields `username` and `password`.
-
-    All the initialization parameters are extracted from the request.
-
-    The only difference between `OAuth2PasswordRequestFormStrict` and
-    `OAuth2PasswordRequestForm` is that `OAuth2PasswordRequestFormStrict` requires the
-    client to send the form field `grant_type` with the value `"password"`, which
-    is required in the OAuth2 specification (seemingly for no particular reason),
-    while for `OAuth2PasswordRequestForm` `grant_type` is optional.
-
-    Read more about it in the
-    [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/).
-
-    ## Example
-
-    ```python
-    from typing import Annotated
-
-    from fastapi import Depends, FastAPI
-    from fastapi.security import OAuth2PasswordRequestFormStrict
-
-    app = FastAPI()
-
-
-    @app.post("/login")
-    def login(form_data: Annotated[OAuth2PasswordRequestFormStrict, Depends()]):
-        data = {}
-        data["scopes"] = []
-        for scope in form_data.scopes:
-            data["scopes"].append(scope)
-        if form_data.client_id:
-            data["client_id"] = form_data.client_id
-        if form_data.client_secret:
-            data["client_secret"] = form_data.client_secret
-        return data
-    ```
-
-    Note that for OAuth2 the scope `items:read` is a single scope in an opaque string.
-    You could have custom internal logic to separate it by colon characters (`:`) or
-    similar, and get the two parts `items` and `read`.
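Tying the form above to a token-issuing *path operation*, in the spirit of the Simple OAuth2 tutorial (the user store and token format are deliberately fake):

```python
from typing import Annotated

from fastapi import Depends, FastAPI, HTTPException
from fastapi.security import OAuth2PasswordRequestForm

app = FastAPI()

# Illustrative stand-in for a real user database with hashed passwords.
fake_users_db = {"johndoe": {"password": "secret"}}


@app.post("/token")
def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]):
    user = fake_users_db.get(form_data.username)
    if not user or user["password"] != form_data.password:
        raise HTTPException(status_code=400, detail="Incorrect username or password")
    # form_data.scopes has already been split on whitespace by the dependency.
    return {
        "access_token": form_data.username,  # fake token for the sketch
        "token_type": "bearer",
        "scopes": form_data.scopes,
    }
```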
Many applications do that to
-    group and organize permissions, you could do it as well in your application, just
-    know that it is application specific, it's not part of the specification.
-
-
-    grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
-    This dependency is strict about it. If you want to be permissive, use instead the
-    OAuth2PasswordRequestForm dependency class.
-    username: username string. The OAuth2 spec requires the exact field name "username".
-    password: password string. The OAuth2 spec requires the exact field name "password".
-    scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
-    "items:read items:write users:read profile openid"
-    client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
-    using HTTP Basic auth, as: client_id:client_secret
-    client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
-    using HTTP Basic auth, as: client_id:client_secret
-    """
-
-    def __init__(
-        self,
-        grant_type: Annotated[
-            str,
-            Form(pattern="^password$"),
-            Doc(
-                """
-                The OAuth2 spec says it is required and MUST be the fixed string
-                "password". This dependency is strict about it. If you want to be
-                permissive, use instead the `OAuth2PasswordRequestForm` dependency
-                class.
-                """
-            ),
-        ],
-        username: Annotated[
-            str,
-            Form(),
-            Doc(
-                """
-                `username` string. The OAuth2 spec requires the exact field name
-                `username`.
-                """
-            ),
-        ],
-        password: Annotated[
-            str,
-            Form(),
-            Doc(
-                """
-                `password` string. The OAuth2 spec requires the exact field name
-                `password`.
-                """
-            ),
-        ],
-        scope: Annotated[
-            str,
-            Form(),
-            Doc(
-                """
-                A single string with actually several scopes separated by spaces. Each
-                scope is also a string.
-
-                For example, a single string with:
-
-                ```python
-                "items:read items:write users:read profile openid"
-                ```
-
-                would represent the scopes:
-
-                * `items:read`
-                * `items:write`
-                * `users:read`
-                * `profile`
-                * `openid`
-                """
-            ),
-        ] = "",
-        client_id: Annotated[
-            Union[str, None],
-            Form(),
-            Doc(
-                """
-                If there's a `client_id`, it can be sent as part of the form fields.
-                But the OAuth2 specification recommends sending the `client_id` and
-                `client_secret` (if any) using HTTP Basic auth.
-                """
-            ),
-        ] = None,
-        client_secret: Annotated[
-            Union[str, None],
-            Form(),
-            Doc(
-                """
-                If there's a `client_password` (and a `client_id`), they can be sent
-                as part of the form fields. But the OAuth2 specification recommends
-                sending the `client_id` and `client_secret` (if any) using HTTP Basic
-                auth.
-                """
-            ),
-        ] = None,
-    ):
-        super().__init__(
-            grant_type=grant_type,
-            username=username,
-            password=password,
-            scope=scope,
-            client_id=client_id,
-            client_secret=client_secret,
-        )
-
-
-class OAuth2(SecurityBase):
-    """
-    This is the base class for OAuth2 authentication, an instance of it would be used
-    as a dependency. All other OAuth2 classes inherit from it and customize it for
-    each OAuth2 flow.
-
-    You normally would not create a new class inheriting from it but use one of the
-    existing subclasses, and maybe compose them if you want to support multiple flows.
-
-    Read more about it in the
-    [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/).
-    """
-
-    def __init__(
-        self,
-        *,
-        flows: Annotated[
-            Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]],
-            Doc(
-                """
-                The dictionary of OAuth2 flows.
- """ - ), - ] = OAuthFlowsModel(), - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Authorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). - """ - ), - ] = True, - ): - self.model = OAuth2Model( - flows=cast(OAuthFlowsModel, flows), description=description - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - if not authorization: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return authorization - - -class OAuth2PasswordBearer(OAuth2): - """ - OAuth2 flow for authentication using a bearer token obtained with a password. - An instance of it would be used as a dependency. - - Read more about it in the - [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). - """ - - def __init__( - self, - tokenUrl: Annotated[ - str, - Doc( - """ - The URL to obtain the OAuth2 token. This would be the *path operation* - that has `OAuth2PasswordRequestForm` as a dependency. - """ - ), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - scopes: Annotated[ - Optional[Dict[str, str]], - Doc( - """ - The OAuth2 scopes that would be required by the *path operations* that - use this dependency. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Authorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). - """ - ), - ] = True, - refreshUrl: Annotated[ - Optional[str], - Doc( - """ - The URL to refresh the token and obtain a new one. 
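The `tokenUrl` above should point at the *path operation* that issues tokens. A minimal wiring sketch, matching the tutorial pattern:

```python
from typing import Annotated

from fastapi import Depends, FastAPI
from fastapi.security import OAuth2PasswordBearer

app = FastAPI()

# "token" must match the path of the login endpoint that issues tokens.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


@app.get("/items/")
async def read_items(token: Annotated[str, Depends(oauth2_scheme)]):
    # The dependency returns the raw bearer token; verifying it is up to you.
    return {"token": token}
```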
- """ - ), - ] = None, - ): - if not scopes: - scopes = {} - flows = OAuthFlowsModel( - password=cast( - Any, - { - "tokenUrl": tokenUrl, - "refreshUrl": refreshUrl, - "scopes": scopes, - }, - ) - ) - super().__init__( - flows=flows, - scheme_name=scheme_name, - description=description, - auto_error=auto_error, - ) - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if not authorization or scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers={"WWW-Authenticate": "Bearer"}, - ) - else: - return None - return param - - -class OAuth2AuthorizationCodeBearer(OAuth2): - """ - OAuth2 flow for authentication using a bearer token obtained with an OAuth2 code - flow. An instance of it would be used as a dependency. - """ - - def __init__( - self, - authorizationUrl: str, - tokenUrl: Annotated[ - str, - Doc( - """ - The URL to obtain the OAuth2 token. - """ - ), - ], - refreshUrl: Annotated[ - Optional[str], - Doc( - """ - The URL to refresh the token and obtain a new one. - """ - ), - ] = None, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - scopes: Annotated[ - Optional[Dict[str, str]], - Doc( - """ - The OAuth2 scopes that would be required by the *path operations* that - use this dependency. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Authorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). - """ - ), - ] = True, - ): - if not scopes: - scopes = {} - flows = OAuthFlowsModel( - authorizationCode=cast( - Any, - { - "authorizationUrl": authorizationUrl, - "tokenUrl": tokenUrl, - "refreshUrl": refreshUrl, - "scopes": scopes, - }, - ) - ) - super().__init__( - flows=flows, - scheme_name=scheme_name, - description=description, - auto_error=auto_error, - ) - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if not authorization or scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers={"WWW-Authenticate": "Bearer"}, - ) - else: - return None # pragma: nocover - return param - - -class SecurityScopes: - """ - This is a special class that you can define in a parameter in a dependency to - obtain the OAuth2 scopes required by all the dependencies in the same chain. - - This way, multiple dependencies can have different scopes, even when used in the - same *path operation*. 
And with this, you can access all the scopes required in - all those dependencies in a single place. - - Read more about it in the - [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). - """ - - def __init__( - self, - scopes: Annotated[ - Optional[List[str]], - Doc( - """ - This will be filled by FastAPI. - """ - ), - ] = None, - ): - self.scopes: Annotated[ - List[str], - Doc( - """ - The list of all the scopes required by dependencies. - """ - ), - ] = scopes or [] - self.scope_str: Annotated[ - str, - Doc( - """ - All the scopes required by all the dependencies in a single string - separated by spaces, as defined in the OAuth2 specification. - """ - ), - ] = " ".join(self.scopes) diff --git a/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py b/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py deleted file mode 100644 index c8cceb91..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/open_id_connect_url.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import Optional - -from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel -from fastapi.security.base import SecurityBase -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.status import HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc - - -class OpenIdConnect(SecurityBase): - """ - OpenID Connect authentication class. An instance of it would be used as a - dependency. - """ - - def __init__( - self, - *, - openIdConnectUrl: Annotated[ - str, - Doc( - """ - The OpenID Connect URL. - """ - ), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Authorization header is provided, required for - OpenID Connect authentication, it will automatically cancel the request - and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OpenID - Connect or in a cookie). 
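# [Editor's sketch -- not part of this diff] Using the SecurityScopes class defined
# above; the scope check against the raw `token` is a stand-in for real token
# introspection and is an assumption of this example.
from fastapi import Depends, FastAPI, HTTPException, Security
from fastapi.security import OAuth2PasswordBearer, SecurityScopes

app = FastAPI()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", scopes={"items:read": "Read items."})

async def get_current_user(
    security_scopes: SecurityScopes, token: str = Depends(oauth2_scheme)
):
    # security_scopes.scopes aggregates every scope declared by the dependants;
    # scope_str is the same list space-joined, as the OAuth2 spec formats it.
    granted = set(token.split())  # placeholder: decode/verify a real token here
    for scope in security_scopes.scopes:
        if scope not in granted:
            raise HTTPException(status_code=401, detail="Not enough permissions")
    return {"user": "demo"}

@app.get("/items/")
async def read_items(user: dict = Security(get_current_user, scopes=["items:read"])):
    return user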
- """ - ), - ] = True, - ): - self.model = OpenIdConnectModel( - openIdConnectUrl=openIdConnectUrl, description=description - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - if not authorization: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return authorization diff --git a/venv/lib/python3.12/site-packages/fastapi/security/utils.py b/venv/lib/python3.12/site-packages/fastapi/security/utils.py deleted file mode 100644 index fa7a450b..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/security/utils.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Optional, Tuple - - -def get_authorization_scheme_param( - authorization_header_value: Optional[str], -) -> Tuple[str, str]: - if not authorization_header_value: - return "", "" - scheme, _, param = authorization_header_value.partition(" ") - return scheme, param diff --git a/venv/lib/python3.12/site-packages/fastapi/staticfiles.py b/venv/lib/python3.12/site-packages/fastapi/staticfiles.py deleted file mode 100644 index 299015d4..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/staticfiles.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/templating.py b/venv/lib/python3.12/site-packages/fastapi/templating.py deleted file mode 100644 index 0cb86848..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/templating.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/testclient.py b/venv/lib/python3.12/site-packages/fastapi/testclient.py deleted file mode 100644 index 4012406a..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/testclient.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.testclient import TestClient as TestClient # noqa diff --git a/venv/lib/python3.12/site-packages/fastapi/types.py b/venv/lib/python3.12/site-packages/fastapi/types.py deleted file mode 100644 index 3205654c..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/types.py +++ /dev/null @@ -1,10 +0,0 @@ -import types -from enum import Enum -from typing import Any, Callable, Dict, Set, Type, TypeVar, Union - -from pydantic import BaseModel - -DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) -UnionType = getattr(types, "UnionType", Union) -ModelNameMap = Dict[Union[Type[BaseModel], Type[Enum]], str] -IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] diff --git a/venv/lib/python3.12/site-packages/fastapi/utils.py b/venv/lib/python3.12/site-packages/fastapi/utils.py deleted file mode 100644 index 4c7350fe..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/utils.py +++ /dev/null @@ -1,220 +0,0 @@ -import re -import warnings -from dataclasses import is_dataclass -from typing import ( - TYPE_CHECKING, - Any, - Dict, - MutableMapping, - Optional, - Set, - Type, - Union, - cast, -) -from weakref import WeakKeyDictionary - -import fastapi -from fastapi._compat import ( - PYDANTIC_V2, - BaseConfig, - ModelField, - PydanticSchemaGenerationError, - Undefined, - UndefinedType, - Validator, - lenient_issubclass, -) -from fastapi.datastructures import DefaultPlaceholder, DefaultType -from pydantic import BaseModel, create_model -from 
pydantic.fields import FieldInfo -from typing_extensions import Literal - -if TYPE_CHECKING: # pragma: nocover - from .routing import APIRoute - -# Cache for `create_cloned_field` -_CLONED_TYPES_CACHE: MutableMapping[Type[BaseModel], Type[BaseModel]] = ( - WeakKeyDictionary() -) - - -def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: - if status_code is None: - return True - # Ref: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#patterned-fields-1 - if status_code in { - "default", - "1XX", - "2XX", - "3XX", - "4XX", - "5XX", - }: - return True - current_status_code = int(status_code) - return not (current_status_code < 200 or current_status_code in {204, 205, 304}) - - -def get_path_param_names(path: str) -> Set[str]: - return set(re.findall("{(.*?)}", path)) - - -def create_model_field( - name: str, - type_: Any, - class_validators: Optional[Dict[str, Validator]] = None, - default: Optional[Any] = Undefined, - required: Union[bool, UndefinedType] = Undefined, - model_config: Type[BaseConfig] = BaseConfig, - field_info: Optional[FieldInfo] = None, - alias: Optional[str] = None, - mode: Literal["validation", "serialization"] = "validation", -) -> ModelField: - class_validators = class_validators or {} - if PYDANTIC_V2: - field_info = field_info or FieldInfo( - annotation=type_, default=default, alias=alias - ) - else: - field_info = field_info or FieldInfo() - kwargs = {"name": name, "field_info": field_info} - if PYDANTIC_V2: - kwargs.update({"mode": mode}) - else: - kwargs.update( - { - "type_": type_, - "class_validators": class_validators, - "default": default, - "required": required, - "model_config": model_config, - "alias": alias, - } - ) - try: - return ModelField(**kwargs) # type: ignore[arg-type] - except (RuntimeError, PydanticSchemaGenerationError): - raise fastapi.exceptions.FastAPIError( - "Invalid args for response field! Hint: " - f"check that {type_} is a valid Pydantic field type. " - "If you are using a return type annotation that is not a valid Pydantic " - "field (e.g. Union[Response, dict, None]) you can disable generating the " - "response model from the type annotation with the path operation decorator " - "parameter response_model=None. 
Read more: " - "https://fastapi.tiangolo.com/tutorial/response-model/" - ) from None - - -def create_cloned_field( - field: ModelField, - *, - cloned_types: Optional[MutableMapping[Type[BaseModel], Type[BaseModel]]] = None, -) -> ModelField: - if PYDANTIC_V2: - return field - # cloned_types caches already cloned types to support recursive models and improve - # performance by avoiding unnecessary cloning - if cloned_types is None: - cloned_types = _CLONED_TYPES_CACHE - - original_type = field.type_ - if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"): - original_type = original_type.__pydantic_model__ - use_type = original_type - if lenient_issubclass(original_type, BaseModel): - original_type = cast(Type[BaseModel], original_type) - use_type = cloned_types.get(original_type) - if use_type is None: - use_type = create_model(original_type.__name__, __base__=original_type) - cloned_types[original_type] = use_type - for f in original_type.__fields__.values(): - use_type.__fields__[f.name] = create_cloned_field( - f, cloned_types=cloned_types - ) - new_field = create_model_field(name=field.name, type_=use_type) - new_field.has_alias = field.has_alias # type: ignore[attr-defined] - new_field.alias = field.alias # type: ignore[misc] - new_field.class_validators = field.class_validators # type: ignore[attr-defined] - new_field.default = field.default # type: ignore[misc] - new_field.required = field.required # type: ignore[misc] - new_field.model_config = field.model_config # type: ignore[attr-defined] - new_field.field_info = field.field_info - new_field.allow_none = field.allow_none # type: ignore[attr-defined] - new_field.validate_always = field.validate_always # type: ignore[attr-defined] - if field.sub_fields: # type: ignore[attr-defined] - new_field.sub_fields = [ # type: ignore[attr-defined] - create_cloned_field(sub_field, cloned_types=cloned_types) - for sub_field in field.sub_fields # type: ignore[attr-defined] - ] - if field.key_field: # type: ignore[attr-defined] - new_field.key_field = create_cloned_field( # type: ignore[attr-defined] - field.key_field, # type: ignore[attr-defined] - cloned_types=cloned_types, - ) - new_field.validators = field.validators # type: ignore[attr-defined] - new_field.pre_validators = field.pre_validators # type: ignore[attr-defined] - new_field.post_validators = field.post_validators # type: ignore[attr-defined] - new_field.parse_json = field.parse_json # type: ignore[attr-defined] - new_field.shape = field.shape # type: ignore[attr-defined] - new_field.populate_validators() # type: ignore[attr-defined] - return new_field - - -def generate_operation_id_for_path( - *, name: str, path: str, method: str -) -> str: # pragma: nocover - warnings.warn( - "fastapi.utils.generate_operation_id_for_path() was deprecated, " - "it is not used internally, and will be removed soon", - DeprecationWarning, - stacklevel=2, - ) - operation_id = f"{name}{path}" - operation_id = re.sub(r"\W", "_", operation_id) - operation_id = f"{operation_id}_{method.lower()}" - return operation_id - - -def generate_unique_id(route: "APIRoute") -> str: - operation_id = f"{route.name}{route.path_format}" - operation_id = re.sub(r"\W", "_", operation_id) - assert route.methods - operation_id = f"{operation_id}_{list(route.methods)[0].lower()}" - return operation_id - - -def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None: - for key, value in update_dict.items(): - if ( - key in main_dict - and isinstance(main_dict[key], dict) - and 
isinstance(value, dict) - ): - deep_dict_update(main_dict[key], value) - elif ( - key in main_dict - and isinstance(main_dict[key], list) - and isinstance(update_dict[key], list) - ): - main_dict[key] = main_dict[key] + update_dict[key] - else: - main_dict[key] = value - - -def get_value_or_default( - first_item: Union[DefaultPlaceholder, DefaultType], - *extra_items: Union[DefaultPlaceholder, DefaultType], -) -> Union[DefaultPlaceholder, DefaultType]: - """ - Pass items or `DefaultPlaceholder`s by descending priority. - - The first one to _not_ be a `DefaultPlaceholder` will be returned. - - Otherwise, the first item (a `DefaultPlaceholder`) will be returned. - """ - items = (first_item,) + extra_items - for item in items: - if not isinstance(item, DefaultPlaceholder): - return item - return first_item diff --git a/venv/lib/python3.12/site-packages/fastapi/websockets.py b/venv/lib/python3.12/site-packages/fastapi/websockets.py deleted file mode 100644 index 55a4ac4a..00000000 --- a/venv/lib/python3.12/site-packages/fastapi/websockets.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.websockets import WebSocket as WebSocket # noqa -from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa -from starlette.websockets import WebSocketState as WebSocketState # noqa diff --git a/venv/lib/python3.12/site-packages/gridfs/__init__.py b/venv/lib/python3.12/site-packages/gridfs/__init__.py deleted file mode 100644 index 8173561b..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""GridFS is a specification for storing large objects in Mongo. - -The :mod:`gridfs` package is an implementation of GridFS on top of -:mod:`pymongo`, exposing a file-like interface. - -.. seealso:: The MongoDB documentation on `gridfs `_. 
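# [Editor's sketch -- not part of this diff] The file-like interface the gridfs
# package docstring describes, against an assumed local mongod and `test` database.
import gridfs
from pymongo import MongoClient

fs = gridfs.GridFS(MongoClient().test)
file_id = fs.put(b"hello gridfs", filename="hello.txt")  # write a file
assert fs.get(file_id).read() == b"hello gridfs"         # read it back by _id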
-""" -from __future__ import annotations - -from gridfs.asynchronous.grid_file import ( - AsyncGridFS, - AsyncGridFSBucket, - AsyncGridIn, - AsyncGridOut, - AsyncGridOutCursor, -) -from gridfs.errors import NoFile -from gridfs.grid_file_shared import DEFAULT_CHUNK_SIZE -from gridfs.synchronous.grid_file import ( - GridFS, - GridFSBucket, - GridIn, - GridOut, - GridOutCursor, -) - -__all__ = [ - "AsyncGridFS", - "GridFS", - "AsyncGridFSBucket", - "GridFSBucket", - "NoFile", - "DEFAULT_CHUNK_SIZE", - "AsyncGridIn", - "GridIn", - "AsyncGridOut", - "GridOut", - "AsyncGridOutCursor", - "GridOutCursor", -] diff --git a/venv/lib/python3.12/site-packages/gridfs/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index ba1433f8..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/__pycache__/errors.cpython-312.pyc deleted file mode 100644 index bf56434a..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/__pycache__/errors.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file.cpython-312.pyc deleted file mode 100644 index 7246db55..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file_shared.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file_shared.cpython-312.pyc deleted file mode 100644 index 9c9b691d..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/__pycache__/grid_file_shared.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__init__.py b/venv/lib/python3.12/site-packages/gridfs/asynchronous/__init__.py deleted file mode 100644 index 0826145b..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""GridFS is a specification for storing large objects in Mongo. - -The :mod:`gridfs` package is an implementation of GridFS on top of -:mod:`pymongo`, exposing a file-like interface. - -.. seealso:: The MongoDB documentation on `gridfs `_. 
-""" -from __future__ import annotations - -from gridfs.asynchronous.grid_file import ( - AsyncGridFS, - AsyncGridFSBucket, - AsyncGridIn, - AsyncGridOut, - AsyncGridOutCursor, -) -from gridfs.errors import NoFile -from gridfs.grid_file_shared import DEFAULT_CHUNK_SIZE - -__all__ = [ - "AsyncGridFS", - "AsyncGridFSBucket", - "NoFile", - "DEFAULT_CHUNK_SIZE", - "AsyncGridIn", - "AsyncGridOut", - "AsyncGridOutCursor", -] diff --git a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index e45a1640..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/grid_file.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/grid_file.cpython-312.pyc deleted file mode 100644 index 761642f7..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/asynchronous/__pycache__/grid_file.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/asynchronous/grid_file.py b/venv/lib/python3.12/site-packages/gridfs/asynchronous/grid_file.py deleted file mode 100644 index 3c7d4ef0..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/asynchronous/grid_file.py +++ /dev/null @@ -1,2008 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Tools for representing files stored in GridFS.""" -from __future__ import annotations - -import datetime -import inspect -import io -import math -from collections import abc -from typing import Any, Iterable, Mapping, NoReturn, Optional, cast - -from bson.int64 import Int64 -from bson.objectid import ObjectId -from gridfs.errors import CorruptGridFile, FileExists, NoFile -from gridfs.grid_file_shared import ( - _C_INDEX, - _CHUNK_OVERHEAD, - _F_INDEX, - _SEEK_CUR, - _SEEK_END, - _SEEK_SET, - _UPLOAD_BUFFER_CHUNKS, - _UPLOAD_BUFFER_SIZE, - DEFAULT_CHUNK_SIZE, - EMPTY, - NEWLN, - _a_grid_in_property, - _a_grid_out_property, - _clear_entity_type_registry, -) -from pymongo import ASCENDING, DESCENDING, WriteConcern, _csot -from pymongo.asynchronous.client_session import AsyncClientSession -from pymongo.asynchronous.collection import AsyncCollection -from pymongo.asynchronous.cursor import AsyncCursor -from pymongo.asynchronous.database import AsyncDatabase -from pymongo.asynchronous.helpers import anext -from pymongo.common import validate_string -from pymongo.errors import ( - BulkWriteError, - ConfigurationError, - CursorNotFound, - DuplicateKeyError, - InvalidOperation, - OperationFailure, -) -from pymongo.helpers_shared import _check_write_command_response -from pymongo.read_preferences import ReadPreference, _ServerMode - -_IS_SYNC = False - - -def _disallow_transactions(session: Optional[AsyncClientSession]) -> None: - if session and session.in_transaction: - raise InvalidOperation("GridFS does not support multi-document transactions") - - -class AsyncGridFS: - """An instance of GridFS on top of a single Database.""" - - def __init__(self, database: AsyncDatabase, collection: str = "fs"): - """Create a new instance of :class:`GridFS`. - - Raises :class:`TypeError` if `database` is not an instance of - :class:`~pymongo.database.Database`. - - :param database: database to use - :param collection: root collection to use - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.11 - Running a GridFS operation in a transaction now always raises an - error. GridFS does not support multi-document transactions. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionchanged:: 3.1 - Indexes are only ensured on the first write to the DB. - - .. versionchanged:: 3.0 - `database` must use an acknowledged - :attr:`~pymongo.database.Database.write_concern` - - .. seealso:: The MongoDB documentation on `gridfs `_. - """ - if not isinstance(database, AsyncDatabase): - raise TypeError(f"database must be an instance of Database, not {type(database)}") - - database = _clear_entity_type_registry(database) - - if not database.write_concern.acknowledged: - raise ConfigurationError("database must use acknowledged write_concern") - - self._collection = database[collection] - self._files = self._collection.files - self._chunks = self._collection.chunks - - def new_file(self, **kwargs: Any) -> AsyncGridIn: - """Create a new file in GridFS. - - Returns a new :class:`~gridfs.grid_file.GridIn` instance to - which data can be written. Any keyword arguments will be - passed through to :meth:`~gridfs.grid_file.GridIn`. - - If the ``"_id"`` of the file is manually specified, it must - not already exist in GridFS. Otherwise - :class:`~gridfs.errors.FileExists` is raised. 
- - :param kwargs: keyword arguments for file creation - """ - return AsyncGridIn(self._collection, **kwargs) - - async def put(self, data: Any, **kwargs: Any) -> Any: - """Put data in GridFS as a new file. - - Equivalent to doing:: - - with fs.new_file(**kwargs) as f: - f.write(data) - - `data` can be either an instance of :class:`bytes` or a file-like - object providing a :meth:`read` method. If an `encoding` keyword - argument is passed, `data` can also be a :class:`str` instance, which - will be encoded as `encoding` before being written. Any keyword - arguments will be passed through to the created file - see - :meth:`~gridfs.grid_file.GridIn` for possible arguments. Returns the - ``"_id"`` of the created file. - - If the ``"_id"`` of the file is manually specified, it must - not already exist in GridFS. Otherwise - :class:`~gridfs.errors.FileExists` is raised. - - :param data: data to be written as a file. - :param kwargs: keyword arguments for file creation - - .. versionchanged:: 3.0 - w=0 writes to GridFS are now prohibited. - """ - async with AsyncGridIn(self._collection, **kwargs) as grid_file: - await grid_file.write(data) - return grid_file._id - - async def get(self, file_id: Any, session: Optional[AsyncClientSession] = None) -> AsyncGridOut: - """Get a file from GridFS by ``"_id"``. - - Returns an instance of :class:`~gridfs.grid_file.GridOut`, - which provides a file-like interface for reading. - - :param file_id: ``"_id"`` of the file to get - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - gout = AsyncGridOut(self._collection, file_id, session=session) - - # Raise NoFile now, instead of on first attribute access. - await gout.open() - return gout - - async def get_version( - self, - filename: Optional[str] = None, - version: Optional[int] = -1, - session: Optional[AsyncClientSession] = None, - **kwargs: Any, - ) -> AsyncGridOut: - """Get a file from GridFS by ``"filename"`` or metadata fields. - - Returns a version of the file in GridFS whose filename matches - `filename` and whose metadata fields match the supplied keyword - arguments, as an instance of :class:`~gridfs.grid_file.GridOut`. - - Version numbering is a convenience atop the GridFS API provided - by MongoDB. If more than one file matches the query (either by - `filename` alone, by metadata fields, or by a combination of - both), then version ``-1`` will be the most recently uploaded - matching file, ``-2`` the second most recently - uploaded, etc. Version ``0`` will be the first version - uploaded, ``1`` the second version, etc. So if three versions - have been uploaded, then version ``0`` is the same as version - ``-3``, version ``1`` is the same as version ``-2``, and - version ``2`` is the same as version ``-1``. - - Raises :class:`~gridfs.errors.NoFile` if no such version of - that file exists. - - :param filename: ``"filename"`` of the file to get, or `None` - :param version: version of the file to get (defaults - to -1, the most recent version uploaded) - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - :param kwargs: find files by custom metadata. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.1 - ``get_version`` no longer ensures indexes. 
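# [Editor's sketch -- not part of this diff] The version numbering described above,
# shown with the async API (assumes a running event loop and a local mongod).
from pymongo import AsyncMongoClient
from gridfs import AsyncGridFS

async def demo() -> None:
    fs = AsyncGridFS(AsyncMongoClient().test)
    for payload in (b"v0", b"v1", b"v2"):
        await fs.put(payload, filename="report.txt")
    newest = await fs.get_version("report.txt")    # version -1: most recent -> b"v2"
    first = await fs.get_version("report.txt", 0)  # version  0: first upload -> b"v0"
    assert await newest.read() == b"v2" and await first.read() == b"v0"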
- """ - query = kwargs - if filename is not None: - query["filename"] = filename - - _disallow_transactions(session) - cursor = self._files.find(query, session=session) - if version is None: - version = -1 - if version < 0: - skip = abs(version) - 1 - cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING) - else: - cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING) - try: - doc = await anext(cursor) - return AsyncGridOut(self._collection, file_document=doc, session=session) - except StopAsyncIteration: - raise NoFile("no version %d for filename %r" % (version, filename)) from None - - async def get_last_version( - self, - filename: Optional[str] = None, - session: Optional[AsyncClientSession] = None, - **kwargs: Any, - ) -> AsyncGridOut: - """Get the most recent version of a file in GridFS by ``"filename"`` - or metadata fields. - - Equivalent to calling :meth:`get_version` with the default - `version` (``-1``). - - :param filename: ``"filename"`` of the file to get, or `None` - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - :param kwargs: find files by custom metadata. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - return await self.get_version(filename=filename, session=session, **kwargs) - - # TODO add optional safe mode for chunk removal? - async def delete(self, file_id: Any, session: Optional[AsyncClientSession] = None) -> None: - """Delete a file from GridFS by ``"_id"``. - - Deletes all data belonging to the file with ``"_id"``: - `file_id`. - - .. warning:: Any processes/threads reading from the file while - this method is executing will likely see an invalid/corrupt - file. Care should be taken to avoid concurrent reads to a file - while it is being deleted. - - .. note:: Deletes of non-existent files are considered successful - since the end result is the same: no file with that _id remains. - - :param file_id: ``"_id"`` of the file to delete - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.1 - ``delete`` no longer ensures indexes. - """ - _disallow_transactions(session) - await self._files.delete_one({"_id": file_id}, session=session) - await self._chunks.delete_many({"files_id": file_id}, session=session) - - async def list(self, session: Optional[AsyncClientSession] = None) -> list[str]: - """List the names of all files stored in this instance of - :class:`GridFS`. - - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.1 - ``list`` no longer ensures indexes. - """ - _disallow_transactions(session) - # With an index, distinct includes documents with no filename - # as None. - return [ - name - for name in await self._files.distinct("filename", session=session) - if name is not None - ] - - async def find_one( - self, - filter: Optional[Any] = None, - session: Optional[AsyncClientSession] = None, - *args: Any, - **kwargs: Any, - ) -> Optional[AsyncGridOut]: - """Get a single file from gridfs. - - All arguments to :meth:`find` are also valid arguments for - :meth:`find_one`, although any `limit` argument will be - ignored. Returns a single :class:`~gridfs.grid_file.GridOut`, - or ``None`` if no matching file is found. For example: - - .. 
code-block:: python
-
-            file = fs.find_one({"filename": "lisa.txt"})
-
-        :param filter: a dictionary specifying
-            the query to be performed OR any other type to be used as
-            the value for a query for ``"_id"`` in the file collection.
-        :param args: any additional positional arguments are
-            the same as the arguments to :meth:`find`.
-        :param session: a
-            :class:`~pymongo.client_session.AsyncClientSession`
-        :param kwargs: any additional keyword arguments
-            are the same as the arguments to :meth:`find`.
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        if filter is not None and not isinstance(filter, abc.Mapping):
-            filter = {"_id": filter}
-
-        _disallow_transactions(session)
-        async for f in self.find(filter, *args, session=session, **kwargs):
-            return f
-
-        return None
-
-    def find(self, *args: Any, **kwargs: Any) -> AsyncGridOutCursor:
-        """Query GridFS for files.
-
-        Returns a cursor that iterates across files matching
-        arbitrary queries on the files collection. Can be combined
-        with other modifiers for additional control. For example::
-
-          for grid_out in fs.find({"filename": "lisa.txt"},
-                                  no_cursor_timeout=True):
-              data = grid_out.read()
-
-        would iterate through all versions of "lisa.txt" stored in GridFS.
-        Note that setting no_cursor_timeout to True may be important to
-        prevent the cursor from timing out during long multi-file processing
-        work.
-
-        As another example, the call::
-
-          most_recent_three = fs.find().sort("uploadDate", -1).limit(3)
-
-        would return a cursor to the three most recently uploaded files
-        in GridFS.
-
-        Follows a similar interface to
-        :meth:`~pymongo.collection.Collection.find`
-        in :class:`~pymongo.collection.Collection`.
-
-        If a :class:`~pymongo.client_session.AsyncClientSession` is passed to
-        :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances
-        are associated with that session.
-
-        :param filter: A query document that selects which files
-            to include in the result set. Can be an empty document to include
-            all files.
-        :param skip: the number of files to omit (from
-            the start of the result set) when returning the results
-        :param limit: the maximum number of results to
-            return
-        :param no_cursor_timeout: if False (the default), any
-            returned cursor is closed by the server after 10 minutes of
-            inactivity. If set to True, the returned cursor will never
-            time out on the server. Care should be taken to ensure that
-            cursors with no_cursor_timeout turned on are properly closed.
-        :param sort: a list of (key, direction) pairs
-            specifying the sort order for this query. See
-            :meth:`~pymongo.cursor.Cursor.sort` for details.
-
-        Raises :class:`TypeError` if any of the arguments are of
-        improper type. Returns an instance of
-        :class:`~gridfs.grid_file.GridOutCursor`
-        corresponding to this query.
-
-        .. versionchanged:: 3.0
-           Removed the read_preference, tag_sets, and
-           secondary_acceptable_latency_ms options.
-        .. versionadded:: 2.7
-        .. seealso:: The MongoDB documentation on `find `_.
-        """
-        return AsyncGridOutCursor(self._collection, *args, **kwargs)
-
-    async def exists(
-        self,
-        document_or_id: Optional[Any] = None,
-        session: Optional[AsyncClientSession] = None,
-        **kwargs: Any,
-    ) -> bool:
-        """Check if a file exists in this instance of :class:`GridFS`.
-
-        The file to check for can be specified by the value of its
-        ``_id`` key, or by passing in a query document. A query
-        document can be passed in as a dictionary, or by using keyword
-        arguments.
Thus, the following three calls are equivalent: - - >>> fs.exists(file_id) - >>> fs.exists({"_id": file_id}) - >>> fs.exists(_id=file_id) - - As are the following two calls: - - >>> fs.exists({"filename": "mike.txt"}) - >>> fs.exists(filename="mike.txt") - - And the following two: - - >>> fs.exists({"foo": {"$gt": 12}}) - >>> fs.exists(foo={"$gt": 12}) - - Returns ``True`` if a matching file exists, ``False`` - otherwise. Calls to :meth:`exists` will not automatically - create appropriate indexes; application developers should be - sure to create indexes if needed and as appropriate. - - :param document_or_id: query document, or _id of the - document to check for - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - :param kwargs: keyword arguments are used as a - query document, if they're present. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - _disallow_transactions(session) - if kwargs: - f = await self._files.find_one(kwargs, ["_id"], session=session) - else: - f = await self._files.find_one(document_or_id, ["_id"], session=session) - - return f is not None - - -class AsyncGridFSBucket: - """An instance of GridFS on top of a single Database.""" - - def __init__( - self, - db: AsyncDatabase, - bucket_name: str = "fs", - chunk_size_bytes: int = DEFAULT_CHUNK_SIZE, - write_concern: Optional[WriteConcern] = None, - read_preference: Optional[_ServerMode] = None, - ) -> None: - """Create a new instance of :class:`GridFSBucket`. - - Raises :exc:`TypeError` if `database` is not an instance of - :class:`~pymongo.database.Database`. - - Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` - is not acknowledged. - - :param database: database to use. - :param bucket_name: The name of the bucket. Defaults to 'fs'. - :param chunk_size_bytes: The chunk size in bytes. Defaults - to 255KB. - :param write_concern: The - :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` - (the default) db.write_concern is used. - :param read_preference: The read preference to use. If - ``None`` (the default) db.read_preference is used. - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.11 - Running a GridFSBucket operation in a transaction now always raises - an error. GridFSBucket does not support multi-document transactions. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionadded:: 3.1 - - .. seealso:: The MongoDB documentation on `gridfs `_. 
- """ - if not isinstance(db, AsyncDatabase): - raise TypeError(f"database must be an instance of AsyncDatabase, not {type(db)}") - - db = _clear_entity_type_registry(db) - - wtc = write_concern if write_concern is not None else db.write_concern - if not wtc.acknowledged: - raise ConfigurationError("write concern must be acknowledged") - - self._bucket_name = bucket_name - self._collection = db[bucket_name] - self._chunks: AsyncCollection = self._collection.chunks.with_options( - write_concern=write_concern, read_preference=read_preference - ) - - self._files: AsyncCollection = self._collection.files.with_options( - write_concern=write_concern, read_preference=read_preference - ) - - self._chunk_size_bytes = chunk_size_bytes - self._timeout = db.client.options.timeout - - def open_upload_stream( - self, - filename: str, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[AsyncClientSession] = None, - ) -> AsyncGridIn: - """Opens a Stream that the application can write the contents of the - file to. - - The user must specify the filename, and can choose to add any - additional information in the metadata field of the file document or - modify the chunk size. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - with fs.open_upload_stream( - "test_file", chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) as grid_in: - grid_in.write("data I want to store!") - # uploaded on close - - Returns an instance of :class:`~gridfs.grid_file.GridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param filename: The name of the file to upload. - :param chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. - :param metadata: User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - validate_string("filename", filename) - - opts = { - "filename": filename, - "chunk_size": ( - chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes - ), - } - if metadata is not None: - opts["metadata"] = metadata - - return AsyncGridIn(self._collection, session=session, **opts) - - def open_upload_stream_with_id( - self, - file_id: Any, - filename: str, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[AsyncClientSession] = None, - ) -> AsyncGridIn: - """Opens a Stream that the application can write the contents of the - file to. - - The user must specify the file id and filename, and can choose to add - any additional information in the metadata field of the file document - or modify the chunk size. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - with fs.open_upload_stream_with_id( - ObjectId(), - "test_file", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) as grid_in: - grid_in.write("data I want to store!") - # uploaded on close - - Returns an instance of :class:`~gridfs.grid_file.GridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param file_id: The id to use for this file. 
The id must not have - already been used for another file. - :param filename: The name of the file to upload. - :param chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. - :param metadata: User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - validate_string("filename", filename) - - opts = { - "_id": file_id, - "filename": filename, - "chunk_size": ( - chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes - ), - } - if metadata is not None: - opts["metadata"] = metadata - - return AsyncGridIn(self._collection, session=session, **opts) - - @_csot.apply - async def upload_from_stream( - self, - filename: str, - source: Any, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[AsyncClientSession] = None, - ) -> ObjectId: - """Uploads a user file to a GridFS bucket. - - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - file_id = fs.upload_from_stream( - "test_file", - "data I want to store!", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - Returns the _id of the uploaded file. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param filename: The name of the file to upload. - :param source: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - :param chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. - :param metadata: User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - async with self.open_upload_stream( - filename, chunk_size_bytes, metadata, session=session - ) as gin: - await gin.write(source) - - return cast(ObjectId, gin._id) - - @_csot.apply - async def upload_from_stream_with_id( - self, - file_id: Any, - filename: str, - source: Any, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[AsyncClientSession] = None, - ) -> None: - """Uploads a user file to a GridFS bucket with a custom file id. - - Reads the contents of the user file from `source` and uploads - it to the file `filename`. Source can be a string or file-like object. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - file_id = fs.upload_from_stream( - ObjectId(), - "test_file", - "data I want to store!", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param file_id: The id to use for this file. The id must not have - already been used for another file. - :param filename: The name of the file to upload. 
- :param source: The source stream of the content to be uploaded. Must be - a file-like object that implements :meth:`read` or a string. - :param chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. - :param metadata: User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - async with self.open_upload_stream_with_id( - file_id, filename, chunk_size_bytes, metadata, session=session - ) as gin: - await gin.write(source) - - async def open_download_stream( - self, file_id: Any, session: Optional[AsyncClientSession] = None - ) -> AsyncGridOut: - """Opens a Stream from which the application can read the contents of - the stored file specified by file_id. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - # get _id of file to read. - file_id = fs.upload_from_stream("test_file", "data I want to store!") - grid_out = fs.open_download_stream(file_id) - contents = grid_out.read() - - Returns an instance of :class:`~gridfs.grid_file.GridOut`. - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :param file_id: The _id of the file to be downloaded. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - gout = AsyncGridOut(self._collection, file_id, session=session) - - # Raise NoFile now, instead of on first attribute access. - await gout.open() - return gout - - @_csot.apply - async def download_to_stream( - self, file_id: Any, destination: Any, session: Optional[AsyncClientSession] = None - ) -> None: - """Downloads the contents of the stored file specified by file_id and - writes the contents to `destination`. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - # Get _id of file to read - file_id = fs.upload_from_stream("test_file", "data I want to store!") - # Get file to write to - file = open('myfile','wb+') - fs.download_to_stream(file_id, file) - file.seek(0) - contents = file.read() - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :param file_id: The _id of the file to be downloaded. - :param destination: a file-like object implementing :meth:`write`. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - async with await self.open_download_stream(file_id, session=session) as gout: - while True: - chunk = await gout.readchunk() - if not len(chunk): - break - destination.write(chunk) - - @_csot.apply - async def delete(self, file_id: Any, session: Optional[AsyncClientSession] = None) -> None: - """Given an file_id, delete this stored file's files collection document - and associated chunks from a GridFS bucket. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - # Get _id of file to delete - file_id = fs.upload_from_stream("test_file", "data I want to store!") - fs.delete(file_id) - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :param file_id: The _id of the file to be deleted. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. 
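# [Editor's sketch -- not part of this diff] Round-tripping one file through the
# bucket upload/download APIs documented above (assumes a local mongod and a
# running event loop).
from pymongo import AsyncMongoClient
from gridfs import AsyncGridFSBucket

async def round_trip() -> bytes:
    bucket = AsyncGridFSBucket(AsyncMongoClient().test)
    file_id = await bucket.upload_from_stream("test_file", b"data I want to store!")
    grid_out = await bucket.open_download_stream(file_id)
    return await grid_out.read()  # -> b"data I want to store!"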
- """ - _disallow_transactions(session) - res = await self._files.delete_one({"_id": file_id}, session=session) - await self._chunks.delete_many({"files_id": file_id}, session=session) - if not res.deleted_count: - raise NoFile("no file could be deleted because none matched %s" % file_id) - - @_csot.apply - async def delete_by_name( - self, filename: str, session: Optional[AsyncClientSession] = None - ) -> None: - """Given a filename, delete this stored file's files collection document(s) - and associated chunks from a GridFS bucket. - - For example:: - - my_db = AsyncMongoClient().test - fs = AsyncGridFSBucket(my_db) - await fs.upload_from_stream("test_file", "data I want to store!") - await fs.delete_by_name("test_file") - - Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. - - :param filename: The name of the file to be deleted. - :param session: a :class:`~pymongo.client_session.AsyncClientSession` - - .. versionadded:: 4.12 - """ - _disallow_transactions(session) - files = self._files.find({"filename": filename}, {"_id": 1}, session=session) - file_ids = [file["_id"] async for file in files] - res = await self._files.delete_many({"_id": {"$in": file_ids}}, session=session) - await self._chunks.delete_many({"files_id": {"$in": file_ids}}, session=session) - if not res.deleted_count: - raise NoFile(f"no file could be deleted because none matched filename {filename!r}") - - def find(self, *args: Any, **kwargs: Any) -> AsyncGridOutCursor: - """Find and return the files collection documents that match ``filter`` - - Returns a cursor that iterates across files matching - arbitrary queries on the files collection. Can be combined - with other modifiers for additional control. - - For example:: - - for grid_data in fs.find({"filename": "lisa.txt"}, - no_cursor_timeout=True): - data = grid_data.read() - - would iterate through all versions of "lisa.txt" stored in GridFS. - Note that setting no_cursor_timeout to True may be important to - prevent the cursor from timing out during long multi-file processing - work. - - As another example, the call:: - - most_recent_three = fs.find().sort("uploadDate", -1).limit(3) - - would return a cursor to the three most recently uploaded files - in GridFS. - - Follows a similar interface to - :meth:`~pymongo.collection.Collection.find` - in :class:`~pymongo.collection.Collection`. - - If a :class:`~pymongo.client_session.AsyncClientSession` is passed to - :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances - are associated with that session. - - :param filter: Search query. - :param batch_size: The number of documents to return per - batch. - :param limit: The maximum number of documents to return. - :param no_cursor_timeout: The server normally times out idle - cursors after an inactivity period (10 minutes) to prevent excess - memory use. Set this option to True prevent that. - :param skip: The number of documents to skip before - returning. - :param sort: The order by which to sort results. Defaults to - None. - """ - return AsyncGridOutCursor(self._collection, *args, **kwargs) - - async def open_download_stream_by_name( - self, filename: str, revision: int = -1, session: Optional[AsyncClientSession] = None - ) -> AsyncGridOut: - """Opens a Stream from which the application can read the contents of - `filename` and optional `revision`. 
- - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - grid_out = fs.open_download_stream_by_name("test_file") - contents = grid_out.read() - - Returns an instance of :class:`~gridfs.grid_file.GridOut`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` filename is not a string. - - :param filename: The name of the file to read from. - :param revision: Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - validate_string("filename", filename) - query = {"filename": filename} - _disallow_transactions(session) - cursor = self._files.find(query, session=session) - if revision < 0: - skip = abs(revision) - 1 - cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING) - else: - cursor.limit(-1).skip(revision).sort("uploadDate", ASCENDING) - try: - grid_file = await anext(cursor) - return AsyncGridOut(self._collection, file_document=grid_file, session=session) - except StopAsyncIteration: - raise NoFile("no version %d for filename %r" % (revision, filename)) from None - - @_csot.apply - async def download_to_stream_by_name( - self, - filename: str, - destination: Any, - revision: int = -1, - session: Optional[AsyncClientSession] = None, - ) -> None: - """Write the contents of `filename` (with optional `revision`) to - `destination`. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - # Get file to write to - file = open('myfile','wb') - fs.download_to_stream_by_name("test_file", file) - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - - Raises :exc:`~ValueError` if `filename` is not a string. - - :param filename: The name of the file to read from. - :param destination: A file-like object that implements :meth:`write`. - :param revision: Which revision (documents with the same - filename and different uploadDate) of the file to retrieve. - Defaults to -1 (the most recent revision). - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - :Note: Revision numbers are defined as follows: - - - 0 = the original stored file - - 1 = the first revision - - 2 = the second revision - - etc... - - -2 = the second most recent revision - - -1 = the most recent revision - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - async with await self.open_download_stream_by_name( - filename, revision, session=session - ) as gout: - while True: - chunk = await gout.readchunk() - if not len(chunk): - break - destination.write(chunk) - - async def rename( - self, file_id: Any, new_filename: str, session: Optional[AsyncClientSession] = None - ) -> None: - """Renames the stored file with the specified file_id. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - # Get _id of file to rename - file_id = fs.upload_from_stream("test_file", "data I want to store!") - fs.rename(file_id, "new_test_name") - - Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - - :param file_id: The _id of the file to be renamed. 
- :param new_filename: The new name of the file. - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - _disallow_transactions(session) - result = await self._files.update_one( - {"_id": file_id}, {"$set": {"filename": new_filename}}, session=session - ) - if not result.matched_count: - raise NoFile( - "no files could be renamed %r because none " - "matched file_id %i" % (new_filename, file_id) - ) - - async def rename_by_name( - self, filename: str, new_filename: str, session: Optional[AsyncClientSession] = None - ) -> None: - """Renames the stored file with the specified filename. - - For example:: - - my_db = AsyncMongoClient().test - fs = AsyncGridFSBucket(my_db) - await fs.upload_from_stream("test_file", "data I want to store!") - await fs.rename_by_name("test_file", "new_test_name") - - Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. - - :param filename: The filename of the file to be renamed. - :param new_filename: The new name of the file. - :param session: a :class:`~pymongo.client_session.AsyncClientSession` - - .. versionadded:: 4.12 - """ - _disallow_transactions(session) - result = await self._files.update_many( - {"filename": filename}, {"$set": {"filename": new_filename}}, session=session - ) - if not result.matched_count: - raise NoFile( - f"no files could be renamed {new_filename!r} because none matched filename {filename!r}" - ) - - -class AsyncGridIn: - """Class to write data to GridFS.""" - - def __init__( - self, - root_collection: AsyncCollection, - session: Optional[AsyncClientSession] = None, - **kwargs: Any, - ) -> None: - """Write a file to GridFS - - Application developers should generally not need to - instantiate this class directly - instead see the methods - provided by :class:`~gridfs.GridFS`. - - Raises :class:`TypeError` if `root_collection` is not an - instance of :class:`~pymongo.collection.AsyncCollection`. - - Any of the file level options specified in the `GridFS Spec - `_ may be passed as - keyword arguments. Any additional keyword arguments will be - set as additional fields on the file document. Valid keyword - arguments include: - - - ``"_id"``: unique ID for this file (default: - :class:`~bson.objectid.ObjectId`) - this ``"_id"`` must - not have already been used for another file - - - ``"filename"``: human name for the file - - - ``"contentType"`` or ``"content_type"``: valid mime-type - for the file - - - ``"chunkSize"`` or ``"chunk_size"``: size of each of the - chunks, in bytes (default: 255 kb) - - - ``"encoding"``: encoding used for this file. Any :class:`str` - that is written to the file will be converted to :class:`bytes`. - - :param root_collection: root collection to write to - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` to use for all - commands - :param kwargs: Any: file level options (see above) - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. 
versionchanged:: 3.0 - `root_collection` must use an acknowledged - :attr:`~pymongo.collection.AsyncCollection.write_concern` - """ - if not isinstance(root_collection, AsyncCollection): - raise TypeError( - f"root_collection must be an instance of AsyncCollection, not {type(root_collection)}" - ) - - if not root_collection.write_concern.acknowledged: - raise ConfigurationError("root_collection must use acknowledged write_concern") - _disallow_transactions(session) - - # Handle alternative naming - if "content_type" in kwargs: - kwargs["contentType"] = kwargs.pop("content_type") - if "chunk_size" in kwargs: - kwargs["chunkSize"] = kwargs.pop("chunk_size") - - coll = _clear_entity_type_registry(root_collection, read_preference=ReadPreference.PRIMARY) - - # Defaults - kwargs["_id"] = kwargs.get("_id", ObjectId()) - kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE) - object.__setattr__(self, "_session", session) - object.__setattr__(self, "_coll", coll) - object.__setattr__(self, "_chunks", coll.chunks) - object.__setattr__(self, "_file", kwargs) - object.__setattr__(self, "_buffer", io.BytesIO()) - object.__setattr__(self, "_position", 0) - object.__setattr__(self, "_chunk_number", 0) - object.__setattr__(self, "_closed", False) - object.__setattr__(self, "_ensured_index", False) - object.__setattr__(self, "_buffered_docs", []) - object.__setattr__(self, "_buffered_docs_size", 0) - - async def _create_index( - self, collection: AsyncCollection, index_key: Any, unique: bool - ) -> None: - doc = await collection.find_one(projection={"_id": 1}, session=self._session) - if doc is None: - try: - index_keys = [ - index_spec["key"] - async for index_spec in await collection.list_indexes(session=self._session) - ] - except OperationFailure: - index_keys = [] - if index_key not in index_keys: - await collection.create_index( - index_key.items(), unique=unique, session=self._session - ) - - async def _ensure_indexes(self) -> None: - if not object.__getattribute__(self, "_ensured_index"): - _disallow_transactions(self._session) - await self._create_index(self._coll.files, _F_INDEX, False) - await self._create_index(self._coll.chunks, _C_INDEX, True) - object.__setattr__(self, "_ensured_index", True) - - async def abort(self) -> None: - """Remove all chunks/files that may have been uploaded and close.""" - await self._coll.chunks.delete_many({"files_id": self._file["_id"]}, session=self._session) - await self._coll.files.delete_one({"_id": self._file["_id"]}, session=self._session) - object.__setattr__(self, "_closed", True) - - @property - def closed(self) -> bool: - """Is this file closed?""" - return self._closed - - _id: Any = _a_grid_in_property("_id", "The ``'_id'`` value for this file.", read_only=True) - filename: Optional[str] = _a_grid_in_property("filename", "Name of this file.") - name: Optional[str] = _a_grid_in_property("filename", "Alias for `filename`.") - content_type: Optional[str] = _a_grid_in_property( - "contentType", "DEPRECATED, will be removed in PyMongo 5.0. Mime-type for this file." - ) - length: int = _a_grid_in_property("length", "Length (in bytes) of this file.", closed_only=True) - chunk_size: int = _a_grid_in_property("chunkSize", "Chunk size for this file.", read_only=True) - upload_date: datetime.datetime = _a_grid_in_property( - "uploadDate", "Date that this file was uploaded.", closed_only=True - ) - md5: Optional[str] = _a_grid_in_property( - "md5", - "DEPRECATED, will be removed in PyMongo 5.0. 
MD5 of the contents of this file if an md5 sum was created.", - closed_only=True, - ) - - _buffer: io.BytesIO - _closed: bool - _buffered_docs: list[dict[str, Any]] - _buffered_docs_size: int - - def __getattr__(self, name: str) -> Any: - if name == "_coll": - return object.__getattribute__(self, name) - elif name in self._file: - return self._file[name] - raise AttributeError("GridIn object has no attribute '%s'" % name) - - def __setattr__(self, name: str, value: Any) -> None: - # For properties of this instance like _buffer, or descriptors set on - # the class like filename, use regular __setattr__ - if name in self.__dict__ or name in self.__class__.__dict__: - object.__setattr__(self, name, value) - else: - # All other attributes are part of the document in db.fs.files. - # Store them to be sent to server on close() or if closed, send - # them now. - self._file[name] = value - if self._closed: - if _IS_SYNC: - self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {name: value}}) - else: - raise AttributeError( - "AsyncGridIn does not support __setattr__ after being closed(). Set the attribute before closing the file or use AsyncGridIn.set() instead" - ) - - async def set(self, name: str, value: Any) -> None: - self._file[name] = value - if self._closed: - await self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {name: value}}) - - async def _flush_data(self, data: Any, force: bool = False) -> None: - """Flush `data` to a chunk.""" - await self._ensure_indexes() - assert len(data) <= self.chunk_size - if data: - self._buffered_docs.append( - {"files_id": self._file["_id"], "n": self._chunk_number, "data": data} - ) - self._buffered_docs_size += len(data) + _CHUNK_OVERHEAD - if not self._buffered_docs: - return - # Limit to 100,000 chunks or 32MB (+1 chunk) of data. - if ( - force - or self._buffered_docs_size >= _UPLOAD_BUFFER_SIZE - or len(self._buffered_docs) >= _UPLOAD_BUFFER_CHUNKS - ): - try: - await self._chunks.insert_many(self._buffered_docs, session=self._session) - except BulkWriteError as exc: - # For backwards compatibility, raise an insert_one style exception. - write_errors = exc.details["writeErrors"] - for err in write_errors: - if err.get("code") in (11000, 11001, 12582): # Duplicate key errors - self._raise_file_exists(self._file["_id"]) - result = {"writeErrors": write_errors} - wces = exc.details["writeConcernErrors"] - if wces: - result["writeConcernError"] = wces[-1] - _check_write_command_response(result) - raise - self._buffered_docs = [] - self._buffered_docs_size = 0 - self._chunk_number += 1 - self._position += len(data) - - async def _flush_buffer(self, force: bool = False) -> None: - """Flush the buffer contents out to a chunk.""" - await self._flush_data(self._buffer.getvalue(), force=force) - self._buffer.close() - self._buffer = io.BytesIO() - - async def _flush(self) -> Any: - """Flush the file to the database.""" - try: - await self._flush_buffer(force=True) - # The GridFS spec says length SHOULD be an Int64. - self._file["length"] = Int64(self._position) - self._file["uploadDate"] = datetime.datetime.now(tz=datetime.timezone.utc) - - return await self._coll.files.insert_one(self._file, session=self._session) - except DuplicateKeyError: - self._raise_file_exists(self._id) - - def _raise_file_exists(self, file_id: Any) -> NoReturn: - """Raise a FileExists exception for the given file_id.""" - raise FileExists("file with _id %r already exists" % file_id) - - async def close(self) -> None: - """Flush the file and close it. 
- - A closed file cannot be written any more. Calling - :meth:`close` more than once is allowed. - """ - if not self._closed: - await self._flush() - object.__setattr__(self, "_closed", True) - - def read(self, size: int = -1) -> NoReturn: - raise io.UnsupportedOperation("read") - - def readable(self) -> bool: - return False - - def seekable(self) -> bool: - return False - - async def write(self, data: Any) -> None: - """Write data to the file. There is no return value. - - `data` can be either a string of bytes or a file-like object - (implementing :meth:`read`). If the file has an - :attr:`encoding` attribute, `data` can also be a - :class:`str` instance, which will be encoded as - :attr:`encoding` before being written. - - Due to buffering, the data may not actually be written to the - database until the :meth:`close` method is called. Raises - :class:`ValueError` if this file is already closed. Raises - :class:`TypeError` if `data` is not an instance of - :class:`bytes`, a file-like object, or an instance of :class:`str`. - Unicode data is only allowed if the file has an :attr:`encoding` - attribute. - - :param data: string of bytes or file-like object to be written - to the file - """ - if self._closed: - raise ValueError("cannot write to a closed file") - - try: - # file-like - read = data.read - except AttributeError: - # string - if not isinstance(data, (str, bytes)): - raise TypeError("can only write strings or file-like objects") from None - if isinstance(data, str): - try: - data = data.encode(self.encoding) - except AttributeError: - raise TypeError( - "must specify an encoding for file in order to write str" - ) from None - read = io.BytesIO(data).read - - if inspect.iscoroutinefunction(read): - await self._write_async(read) - else: - if self._buffer.tell() > 0: - # Make sure to flush only when _buffer is complete - space = self.chunk_size - self._buffer.tell() - if space: - try: - to_write = read(space) - except BaseException: - await self.abort() - raise - self._buffer.write(to_write) - if len(to_write) < space: - return # EOF or incomplete - await self._flush_buffer() - to_write = read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: - await self._flush_data(to_write) - to_write = read(self.chunk_size) - self._buffer.write(to_write) - - async def _write_async(self, read: Any) -> None: - if self._buffer.tell() > 0: - # Make sure to flush only when _buffer is complete - space = self.chunk_size - self._buffer.tell() - if space: - try: - to_write = await read(space) - except BaseException: - await self.abort() - raise - self._buffer.write(to_write) - if len(to_write) < space: - return # EOF or incomplete - await self._flush_buffer() - to_write = await read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: - await self._flush_data(to_write) - to_write = await read(self.chunk_size) - self._buffer.write(to_write) - - async def writelines(self, sequence: Iterable[Any]) -> None: - """Write a sequence of strings to the file. - - Does not add separators. - """ - for line in sequence: - await self.write(line) - - def writeable(self) -> bool: - return True - - async def __aenter__(self) -> AsyncGridIn: - """Support for the context manager protocol.""" - return self - - async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: - """Support for the context manager protocol. - - Close the file if no exceptions occur and allow exceptions to propagate. - """ - if exc_type is None: - # No exceptions happened. 
- await self.close() - else: - # Something happened, at minimum mark as closed. - object.__setattr__(self, "_closed", True) - - # propagate exceptions - return False - - -GRIDOUT_BASE_CLASS = io.IOBase if _IS_SYNC else object # type: Any - - -class AsyncGridOut(GRIDOUT_BASE_CLASS): # type: ignore - - """Class to read data out of GridFS.""" - - def __init__( - self, - root_collection: AsyncCollection, - file_id: Optional[int] = None, - file_document: Optional[Any] = None, - session: Optional[AsyncClientSession] = None, - ) -> None: - """Read a file from GridFS - - Application developers should generally not need to - instantiate this class directly - instead see the methods - provided by :class:`~gridfs.GridFS`. - - Either `file_id` or `file_document` must be specified, - `file_document` will be given priority if present. Raises - :class:`TypeError` if `root_collection` is not an instance of - :class:`~pymongo.collection.AsyncCollection`. - - :param root_collection: root collection to read from - :param file_id: value of ``"_id"`` for the file to read - :param file_document: file document from - `root_collection.files` - :param session: a - :class:`~pymongo.client_session.AsyncClientSession` to use for all - commands - - .. versionchanged:: 3.8 - For better performance and to better follow the GridFS spec, - :class:`GridOut` now uses a single cursor to read all the chunks in - the file. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.0 - Creating a GridOut does not immediately retrieve the file metadata - from the server. Metadata is fetched when first needed. - """ - if not isinstance(root_collection, AsyncCollection): - raise TypeError( - f"root_collection must be an instance of AsyncCollection, not {type(root_collection)}" - ) - _disallow_transactions(session) - - root_collection = _clear_entity_type_registry(root_collection) - - super().__init__() - - self._chunks = root_collection.chunks - self._files = root_collection.files - self._file_id = file_id - self._buffer = EMPTY - # Start position within the current buffered chunk. - self._buffer_pos = 0 - self._chunk_iter = None - # Position within the total file. - self._position = 0 - self._file = file_document - self._session = session - if not _IS_SYNC: - self.closed = False - - _id: Any = _a_grid_out_property("_id", "The ``'_id'`` value for this file.") - filename: str = _a_grid_out_property("filename", "Name of this file.") - name: str = _a_grid_out_property("filename", "Alias for `filename`.") - content_type: Optional[str] = _a_grid_out_property( - "contentType", "DEPRECATED, will be removed in PyMongo 5.0. Mime-type for this file." - ) - length: int = _a_grid_out_property("length", "Length (in bytes) of this file.") - chunk_size: int = _a_grid_out_property("chunkSize", "Chunk size for this file.") - upload_date: datetime.datetime = _a_grid_out_property( - "uploadDate", "Date that this file was first uploaded." - ) - aliases: Optional[list[str]] = _a_grid_out_property( - "aliases", "DEPRECATED, will be removed in PyMongo 5.0. List of aliases for this file." - ) - metadata: Optional[Mapping[str, Any]] = _a_grid_out_property( - "metadata", "Metadata attached to this file." - ) - md5: Optional[str] = _a_grid_out_property( - "md5", - "DEPRECATED, will be removed in PyMongo 5.0. 
MD5 of the contents of this file if an md5 sum was created.", - ) - - _file: Any - _chunk_iter: Any - - if not _IS_SYNC: - closed: bool - - async def __anext__(self) -> bytes: - line = await self.readline() - if line: - return line - raise StopAsyncIteration() - - async def to_list(self) -> list[bytes]: - return [x async for x in self] # noqa: C416, RUF100 - - async def readline(self, size: int = -1) -> bytes: - """Read one line or up to `size` bytes from the file. - - :param size: the maximum number of bytes to read - """ - return await self._read_size_or_line(size=size, line=True) - - async def readlines(self, size: int = -1) -> list[bytes]: - """Read one line or up to `size` bytes from the file. - - :param size: the maximum number of bytes to read - """ - await self.open() - lines = [] - remainder = int(self.length) - self._position - bytes_read = 0 - while remainder > 0: - line = await self._read_size_or_line(line=True) - bytes_read += len(line) - lines.append(line) - remainder = int(self.length) - self._position - if 0 < size < bytes_read: - break - - return lines - - async def open(self) -> None: - if not self._file: - _disallow_transactions(self._session) - self._file = await self._files.find_one({"_id": self._file_id}, session=self._session) - if not self._file: - raise NoFile( - f"no file in gridfs collection {self._files!r} with _id {self._file_id!r}" - ) - - def __getattr__(self, name: str) -> Any: - if _IS_SYNC: - self.open() # type: ignore[unused-coroutine] - elif not self._file: - raise InvalidOperation( - "You must call AsyncGridOut.open() before accessing the %s property" % name - ) - if name in self._file: - return self._file[name] - raise AttributeError("GridOut object has no attribute '%s'" % name) - - def readable(self) -> bool: - return True - - async def readchunk(self) -> bytes: - """Reads a chunk at a time. If the current position is within a - chunk the remainder of the chunk is returned. - """ - await self.open() - received = len(self._buffer) - self._buffer_pos - chunk_data = EMPTY - chunk_size = int(self.chunk_size) - - if received > 0: - chunk_data = self._buffer[self._buffer_pos :] - elif self._position < int(self.length): - chunk_number = int((received + self._position) / chunk_size) - if self._chunk_iter is None: - self._chunk_iter = _AsyncGridOutChunkIterator( - self, self._chunks, self._session, chunk_number - ) - - chunk = await self._chunk_iter.next() - chunk_data = chunk["data"][self._position % chunk_size :] - - if not chunk_data: - raise CorruptGridFile("truncated chunk") - - self._position += len(chunk_data) - self._buffer = EMPTY - self._buffer_pos = 0 - return chunk_data - - async def _read_size_or_line(self, size: int = -1, line: bool = False) -> bytes: - """Internal read() and readline() helper.""" - await self.open() - remainder = int(self.length) - self._position - if size < 0 or size > remainder: - size = remainder - - if size == 0: - return EMPTY - - received = 0 - data = [] - while received < size: - needed = size - received - if self._buffer: - # Optimization: Read the buffer with zero byte copies. - buf = self._buffer - chunk_start = self._buffer_pos - chunk_data = memoryview(buf)[self._buffer_pos :] - self._buffer = EMPTY - self._buffer_pos = 0 - self._position += len(chunk_data) - else: - buf = await self.readchunk() - chunk_start = 0 - chunk_data = memoryview(buf) - if line: - pos = buf.find(NEWLN, chunk_start, chunk_start + needed) - chunk_start - if pos >= 0: - # Decrease size to exit the loop. 
- size = received + pos + 1 - needed = pos + 1 - if len(chunk_data) > needed: - data.append(chunk_data[:needed]) - # Optimization: Save the buffer with zero byte copies. - self._buffer = buf - self._buffer_pos = chunk_start + needed - self._position -= len(self._buffer) - self._buffer_pos - else: - data.append(chunk_data) - received += len(chunk_data) - - # Detect extra chunks after reading the entire file. - if size == remainder and self._chunk_iter: - try: - await self._chunk_iter.next() - except StopAsyncIteration: - pass - - return b"".join(data) - - async def read(self, size: int = -1) -> bytes: - """Read at most `size` bytes from the file (less if there - isn't enough data). - - The bytes are returned as an instance of :class:`bytes` - If `size` is negative or omitted all data is read. - - :param size: the number of bytes to read - - .. versionchanged:: 3.8 - This method now only checks for extra chunks after reading the - entire file. Previously, this method would check for extra chunks - on every call. - """ - return await self._read_size_or_line(size=size) - - def tell(self) -> int: - """Return the current position of this file.""" - return self._position - - async def seek(self, pos: int, whence: int = _SEEK_SET) -> int: - """Set the current position of this file. - - :param pos: the position (or offset if using relative - positioning) to seek to - :param whence: where to seek - from. :attr:`os.SEEK_SET` (``0``) for absolute file - positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative - to the current position, :attr:`os.SEEK_END` (``2``) to - seek relative to the file's end. - - .. versionchanged:: 4.1 - The method now returns the new position in the file, to - conform to the behavior of :meth:`io.IOBase.seek`. - """ - if whence == _SEEK_SET: - new_pos = pos - elif whence == _SEEK_CUR: - new_pos = self._position + pos - elif whence == _SEEK_END: - new_pos = int(self.length) + pos - else: - raise OSError(22, "Invalid value for `whence`") - - if new_pos < 0: - raise OSError(22, "Invalid value for `pos` - must be positive") - - # Optimization, continue using the same buffer and chunk iterator. - if new_pos == self._position: - return new_pos - - self._position = new_pos - self._buffer = EMPTY - self._buffer_pos = 0 - if self._chunk_iter: - await self._chunk_iter.close() - self._chunk_iter = None - return new_pos - - def seekable(self) -> bool: - return True - - def __aiter__(self) -> AsyncGridOut: - """Return an iterator over all of this file's data. - - The iterator will return lines (delimited by ``b'\\n'``) of - :class:`bytes`. This can be useful when serving files - using a webserver that handles such an iterator efficiently. - - .. versionchanged:: 3.8 - The iterator now raises :class:`CorruptGridFile` when encountering - any truncated, missing, or extra chunk in a file. The previous - behavior was to only raise :class:`CorruptGridFile` on a missing - chunk. - - .. versionchanged:: 4.0 - The iterator now iterates over *lines* in the file, instead - of chunks, to conform to the base class :py:class:`io.IOBase`. - Use :meth:`GridOut.readchunk` to read chunk by chunk instead - of line by line. 
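Line iteration composes with :meth:`seek`, which discards the buffer and chunk
iterator, so rewinding and re-reading is cheap bookkeeping. A minimal sketch,
assuming ``fs`` is an ``AsyncGridFSBucket`` whose ``open_download_stream``
mirrors the synchronous method shown later in this diff, and that ``file_id``
names an existing file::

    async with await fs.open_download_stream(file_id) as grid_out:
        header = await grid_out.readline()  # bytes up to the first b"\n"
        await grid_out.seek(0)              # rewind: buffer and chunk cursor reset
        async for line in grid_out:         # b"\n"-delimited lines, per __aiter__
            pass                            # placeholder for real line handling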
- """ - return self - - async def close(self) -> None: - """Make GridOut more generically file-like.""" - if self._chunk_iter: - await self._chunk_iter.close() - self._chunk_iter = None - if _IS_SYNC: - super().close() - else: - self.closed = True - - def write(self, value: Any) -> NoReturn: - raise io.UnsupportedOperation("write") - - def writelines(self, lines: Any) -> NoReturn: - raise io.UnsupportedOperation("writelines") - - def writable(self) -> bool: - return False - - async def __aenter__(self) -> AsyncGridOut: - """Makes it possible to use :class:`AsyncGridOut` files - with the async context manager protocol. - """ - return self - - async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: - """Makes it possible to use :class:`AsyncGridOut` files - with the async context manager protocol. - """ - await self.close() - return False - - def fileno(self) -> NoReturn: - raise io.UnsupportedOperation("fileno") - - def flush(self) -> None: - # GridOut is read-only, so flush does nothing. - pass - - def isatty(self) -> bool: - return False - - def truncate(self, size: Optional[int] = None) -> NoReturn: - # See https://docs.python.org/3/library/io.html#io.IOBase.writable - # for why truncate has to raise. - raise io.UnsupportedOperation("truncate") - - # Override IOBase.__del__ otherwise it will lead to __getattr__ on - # __IOBase_closed which calls _ensure_file and potentially performs I/O. - # We cannot do I/O in __del__ since it can lead to a deadlock. - def __del__(self) -> None: - pass - - -class _AsyncGridOutChunkIterator: - """Iterates over a file's chunks using a single cursor. - - Raises CorruptGridFile when encountering any truncated, missing, or extra - chunk in a file. - """ - - def __init__( - self, - grid_out: AsyncGridOut, - chunks: AsyncCollection, - session: Optional[AsyncClientSession], - next_chunk: Any, - ) -> None: - self._id = grid_out._id - self._chunk_size = int(grid_out.chunk_size) - self._length = int(grid_out.length) - self._chunks = chunks - self._session = session - self._next_chunk = next_chunk - self._num_chunks = math.ceil(float(self._length) / self._chunk_size) - self._cursor = None - - _cursor: Optional[AsyncCursor] - - def expected_chunk_length(self, chunk_n: int) -> int: - if chunk_n < self._num_chunks - 1: - return self._chunk_size - return self._length - (self._chunk_size * (self._num_chunks - 1)) - - def __aiter__(self) -> _AsyncGridOutChunkIterator: - return self - - def _create_cursor(self) -> None: - filter = {"files_id": self._id} - if self._next_chunk > 0: - filter["n"] = {"$gte": self._next_chunk} - _disallow_transactions(self._session) - self._cursor = self._chunks.find(filter, sort=[("n", 1)], session=self._session) - - async def _next_with_retry(self) -> Mapping[str, Any]: - """Return the next chunk and retry once on CursorNotFound. - - We retry on CursorNotFound to maintain backwards compatibility in - cases where two calls to read occur more than 10 minutes apart (the - server's default cursor timeout). 
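The retry is safe because ``_next_chunk`` records how far the scan got, so the
replacement cursor resumes with an ``$gte`` filter rather than rescanning the
file. The same retry-once shape in isolation, with hypothetical names
(``next_chunk_with_retry`` is not part of this module)::

    from pymongo.errors import CursorNotFound

    async def next_chunk_with_retry(chunks, file_id, next_chunk, cursor):
        # Retry once: the server reaps idle cursors after ~10 minutes,
        # so a long pause between reads can invalidate `cursor`.
        try:
            return await cursor.next()
        except CursorNotFound:
            await cursor.close()
            cursor = chunks.find(
                {"files_id": file_id, "n": {"$gte": next_chunk}}, sort=[("n", 1)]
            )
            return await cursor.next()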
- """ - if self._cursor is None: - self._create_cursor() - assert self._cursor is not None - try: - return await self._cursor.next() - except CursorNotFound: - await self._cursor.close() - self._create_cursor() - return await self._cursor.next() - - async def next(self) -> Mapping[str, Any]: - try: - chunk = await self._next_with_retry() - except StopAsyncIteration: - if self._next_chunk >= self._num_chunks: - raise - raise CorruptGridFile("no chunk #%d" % self._next_chunk) from None - - if chunk["n"] != self._next_chunk: - await self.close() - raise CorruptGridFile( - "Missing chunk: expected chunk #%d but found " - "chunk with n=%d" % (self._next_chunk, chunk["n"]) - ) - - if chunk["n"] >= self._num_chunks: - # According to spec, ignore extra chunks if they are empty. - if len(chunk["data"]): - await self.close() - raise CorruptGridFile( - "Extra chunk found: expected %d chunks but found " - "chunk with n=%d" % (self._num_chunks, chunk["n"]) - ) - - expected_length = self.expected_chunk_length(chunk["n"]) - if len(chunk["data"]) != expected_length: - await self.close() - raise CorruptGridFile( - "truncated chunk #%d: expected chunk length to be %d but " - "found chunk with length %d" % (chunk["n"], expected_length, len(chunk["data"])) - ) - - self._next_chunk += 1 - return chunk - - __anext__ = next - - async def close(self) -> None: - if self._cursor: - await self._cursor.close() - self._cursor = None - - -class AsyncGridOutIterator: - def __init__( - self, grid_out: AsyncGridOut, chunks: AsyncCollection, session: AsyncClientSession - ): - self._chunk_iter = _AsyncGridOutChunkIterator(grid_out, chunks, session, 0) - - def __aiter__(self) -> AsyncGridOutIterator: - return self - - async def next(self) -> bytes: - chunk = await self._chunk_iter.next() - return bytes(chunk["data"]) - - __anext__ = next - - -class AsyncGridOutCursor(AsyncCursor): - """A cursor / iterator for returning GridOut objects as the result - of an arbitrary query against the GridFS files collection. - """ - - def __init__( - self, - collection: AsyncCollection, - filter: Optional[Mapping[str, Any]] = None, - skip: int = 0, - limit: int = 0, - no_cursor_timeout: bool = False, - sort: Optional[Any] = None, - batch_size: int = 0, - session: Optional[AsyncClientSession] = None, - ) -> None: - """Create a new cursor, similar to the normal - :class:`~pymongo.cursor.Cursor`. - - Should not be called directly by application developers - see - the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead. - - .. versionadded 2.7 - - .. seealso:: The MongoDB documentation on `cursors `_. - """ - _disallow_transactions(session) - collection = _clear_entity_type_registry(collection) - - # Hold on to the base "fs" collection to create GridOut objects later. 
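The ``expected_chunk_length`` check used above is plain arithmetic: every chunk
but the last must be exactly ``chunk_size`` bytes, and the last carries the
remainder. A worked instance with illustrative numbers::

    import math

    length, chunk_size = 600_000, 255 * 1024     # 255 KB chunks = 261120 bytes
    num_chunks = math.ceil(length / chunk_size)  # 3

    def expected(n: int) -> int:
        if n < num_chunks - 1:
            return chunk_size
        return length - chunk_size * (num_chunks - 1)

    assert [expected(n) for n in range(num_chunks)] == [261120, 261120, 77760]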
- self._root_collection = collection - - super().__init__( - collection.files, - filter, - skip=skip, - limit=limit, - no_cursor_timeout=no_cursor_timeout, - sort=sort, - batch_size=batch_size, - session=session, - ) - - async def next(self) -> AsyncGridOut: - """Get next GridOut object from cursor.""" - _disallow_transactions(self.session) - next_file = await super().next() - return AsyncGridOut(self._root_collection, file_document=next_file, session=self.session) - - async def to_list(self, length: Optional[int] = None) -> list[AsyncGridOut]: - """Convert the cursor to a list.""" - if length is None: - return [x async for x in self] # noqa: C416,RUF100 - if length < 1: - raise ValueError("to_list() length must be greater than 0") - ret = [] - for _ in range(length): - ret.append(await self.next()) - return ret - - __anext__ = next - - def add_option(self, *args: Any, **kwargs: Any) -> NoReturn: - raise NotImplementedError("Method does not exist for GridOutCursor") - - def remove_option(self, *args: Any, **kwargs: Any) -> NoReturn: - raise NotImplementedError("Method does not exist for GridOutCursor") - - def _clone_base(self, session: Optional[AsyncClientSession]) -> AsyncGridOutCursor: - """Creates an empty GridOutCursor for information to be copied into.""" - return AsyncGridOutCursor(self._root_collection, session=session) diff --git a/venv/lib/python3.12/site-packages/gridfs/errors.py b/venv/lib/python3.12/site-packages/gridfs/errors.py deleted file mode 100644 index e8c02cef..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/errors.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2009-2015 MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions raised by the :mod:`gridfs` package""" -from __future__ import annotations - -from pymongo.errors import PyMongoError - - -class GridFSError(PyMongoError): - """Base class for all GridFS exceptions.""" - - -class CorruptGridFile(GridFSError): - """Raised when a file in :class:`~gridfs.GridFS` is malformed.""" - - -class NoFile(GridFSError): - """Raised when trying to read from a non-existent file.""" - - -class FileExists(GridFSError): - """Raised when trying to create a file that already exists.""" diff --git a/venv/lib/python3.12/site-packages/gridfs/grid_file.py b/venv/lib/python3.12/site-packages/gridfs/grid_file.py deleted file mode 100644 index b2cab715..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/grid_file.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2024-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Re-import of synchronous gridfs API for compatibility.""" -from __future__ import annotations - -from gridfs.synchronous.grid_file import * # noqa: F403 diff --git a/venv/lib/python3.12/site-packages/gridfs/grid_file_shared.py b/venv/lib/python3.12/site-packages/gridfs/grid_file_shared.py deleted file mode 100644 index 79a0ad7f..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/grid_file_shared.py +++ /dev/null @@ -1,168 +0,0 @@ -from __future__ import annotations - -import os -import warnings -from typing import Any, Optional - -from pymongo import ASCENDING -from pymongo.common import MAX_MESSAGE_SIZE -from pymongo.errors import InvalidOperation - -_SEEK_SET = os.SEEK_SET -_SEEK_CUR = os.SEEK_CUR -_SEEK_END = os.SEEK_END - -EMPTY = b"" -NEWLN = b"\n" - -"""Default chunk size, in bytes.""" -# Slightly under a power of 2, to work well with server's record allocations. -DEFAULT_CHUNK_SIZE = 255 * 1024 -# The number of chunked bytes to buffer before calling insert_many. -_UPLOAD_BUFFER_SIZE = MAX_MESSAGE_SIZE -# The number of chunk documents to buffer before calling insert_many. -_UPLOAD_BUFFER_CHUNKS = 100000 -# Rough BSON overhead of a chunk document not including the chunk data itself. -# Essentially len(encode({"_id": ObjectId(), "files_id": ObjectId(), "n": 1, "data": ""})) -_CHUNK_OVERHEAD = 60 - -_C_INDEX: dict[str, Any] = {"files_id": ASCENDING, "n": ASCENDING} -_F_INDEX: dict[str, Any] = {"filename": ASCENDING, "uploadDate": ASCENDING} - - -def _a_grid_in_property( - field_name: str, - docstring: str, - read_only: Optional[bool] = False, - closed_only: Optional[bool] = False, -) -> Any: - """Create a GridIn property.""" - - warn_str = "" - if docstring.startswith("DEPRECATED,"): - warn_str = ( - f"GridIn property '{field_name}' is deprecated and will be removed in PyMongo 5.0" - ) - - def getter(self: Any) -> Any: - if warn_str: - warnings.warn(warn_str, stacklevel=2, category=DeprecationWarning) - if closed_only and not self._closed: - raise AttributeError("can only get %r on a closed file" % field_name) - # Protect against PHP-237 - if field_name == "length": - return self._file.get(field_name, 0) - return self._file.get(field_name, None) - - def setter(self: Any, value: Any) -> Any: - if warn_str: - warnings.warn(warn_str, stacklevel=2, category=DeprecationWarning) - if self._closed: - raise InvalidOperation( - "AsyncGridIn does not support __setattr__ after being closed(). Set the attribute before closing the file or use AsyncGridIn.set() instead" - ) - self._file[field_name] = value - - if read_only: - docstring += "\n\nThis attribute is read-only." - elif closed_only: - docstring = "{}\n\n{}".format( - docstring, - "This attribute is read-only and " - "can only be read after :meth:`close` " - "has been called.", - ) - - if not read_only and not closed_only: - return property(getter, setter, doc=docstring) - return property(getter, doc=docstring) - - -def _a_grid_out_property(field_name: str, docstring: str) -> Any: - """Create a GridOut property.""" - - def a_getter(self: Any) -> Any: - if not self._file: - raise InvalidOperation( - "You must call GridOut.open() before accessing " "the %s property" % field_name - ) - # Protect against PHP-237 - if field_name == "length": - return self._file.get(field_name, 0) - return self._file.get(field_name, None) - - docstring += "\n\nThis attribute is read-only." 
- return property(a_getter, doc=docstring) - - -def _grid_in_property( - field_name: str, - docstring: str, - read_only: Optional[bool] = False, - closed_only: Optional[bool] = False, -) -> Any: - """Create a GridIn property.""" - warn_str = "" - if docstring.startswith("DEPRECATED,"): - warn_str = ( - f"GridIn property '{field_name}' is deprecated and will be removed in PyMongo 5.0" - ) - - def getter(self: Any) -> Any: - if warn_str: - warnings.warn(warn_str, stacklevel=2, category=DeprecationWarning) - if closed_only and not self._closed: - raise AttributeError("can only get %r on a closed file" % field_name) - # Protect against PHP-237 - if field_name == "length": - return self._file.get(field_name, 0) - return self._file.get(field_name, None) - - def setter(self: Any, value: Any) -> Any: - if warn_str: - warnings.warn(warn_str, stacklevel=2, category=DeprecationWarning) - if self._closed: - self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {field_name: value}}) - self._file[field_name] = value - - if read_only: - docstring += "\n\nThis attribute is read-only." - elif closed_only: - docstring = "{}\n\n{}".format( - docstring, - "This attribute is read-only and " - "can only be read after :meth:`close` " - "has been called.", - ) - - if not read_only and not closed_only: - return property(getter, setter, doc=docstring) - return property(getter, doc=docstring) - - -def _grid_out_property(field_name: str, docstring: str) -> Any: - """Create a GridOut property.""" - warn_str = "" - if docstring.startswith("DEPRECATED,"): - warn_str = ( - f"GridOut property '{field_name}' is deprecated and will be removed in PyMongo 5.0" - ) - - def getter(self: Any) -> Any: - if warn_str: - warnings.warn(warn_str, stacklevel=2, category=DeprecationWarning) - self.open() - - # Protect against PHP-237 - if field_name == "length": - return self._file.get(field_name, 0) - return self._file.get(field_name, None) - - docstring += "\n\nThis attribute is read-only." - return property(getter, doc=docstring) - - -def _clear_entity_type_registry(entity: Any, **kwargs: Any) -> Any: - """Clear the given database/collection object's type registry.""" - codecopts = entity.codec_options.with_options(type_registry=None) - return entity.with_options(codec_options=codecopts, **kwargs) diff --git a/venv/lib/python3.12/site-packages/gridfs/py.typed b/venv/lib/python3.12/site-packages/gridfs/py.typed deleted file mode 100644 index 0f405706..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# PEP-561 Support File. -# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing". diff --git a/venv/lib/python3.12/site-packages/gridfs/synchronous/__init__.py b/venv/lib/python3.12/site-packages/gridfs/synchronous/__init__.py deleted file mode 100644 index bc270436..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/synchronous/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""GridFS is a specification for storing large objects in Mongo. - -The :mod:`gridfs` package is an implementation of GridFS on top of -:mod:`pymongo`, exposing a file-like interface. - -.. seealso:: The MongoDB documentation on `gridfs `_. -""" -from __future__ import annotations - -from gridfs.errors import NoFile -from gridfs.grid_file_shared import DEFAULT_CHUNK_SIZE -from gridfs.synchronous.grid_file import ( - GridFS, - GridFSBucket, - GridIn, - GridOut, - GridOutCursor, -) - -__all__ = [ - "GridFS", - "GridFSBucket", - "NoFile", - "DEFAULT_CHUNK_SIZE", - "GridIn", - "GridOut", - "GridOutCursor", -] diff --git a/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 3b437cae..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/grid_file.cpython-312.pyc b/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/grid_file.cpython-312.pyc deleted file mode 100644 index 55dc1e54..00000000 Binary files a/venv/lib/python3.12/site-packages/gridfs/synchronous/__pycache__/grid_file.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/gridfs/synchronous/grid_file.py b/venv/lib/python3.12/site-packages/gridfs/synchronous/grid_file.py deleted file mode 100644 index d0a4c7fc..00000000 --- a/venv/lib/python3.12/site-packages/gridfs/synchronous/grid_file.py +++ /dev/null @@ -1,1994 +0,0 @@ -# Copyright 2009-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Tools for representing files stored in GridFS.""" -from __future__ import annotations - -import datetime -import inspect -import io -import math -from collections import abc -from typing import Any, Iterable, Mapping, NoReturn, Optional, cast - -from bson.int64 import Int64 -from bson.objectid import ObjectId -from gridfs.errors import CorruptGridFile, FileExists, NoFile -from gridfs.grid_file_shared import ( - _C_INDEX, - _CHUNK_OVERHEAD, - _F_INDEX, - _SEEK_CUR, - _SEEK_END, - _SEEK_SET, - _UPLOAD_BUFFER_CHUNKS, - _UPLOAD_BUFFER_SIZE, - DEFAULT_CHUNK_SIZE, - EMPTY, - NEWLN, - _clear_entity_type_registry, - _grid_in_property, - _grid_out_property, -) -from pymongo import ASCENDING, DESCENDING, WriteConcern, _csot -from pymongo.common import validate_string -from pymongo.errors import ( - BulkWriteError, - ConfigurationError, - CursorNotFound, - DuplicateKeyError, - InvalidOperation, - OperationFailure, -) -from pymongo.helpers_shared import _check_write_command_response -from pymongo.read_preferences import ReadPreference, _ServerMode -from pymongo.synchronous.client_session import ClientSession -from pymongo.synchronous.collection import Collection -from pymongo.synchronous.cursor import Cursor -from pymongo.synchronous.database import Database -from pymongo.synchronous.helpers import next - -_IS_SYNC = True - - -def _disallow_transactions(session: Optional[ClientSession]) -> None: - if session and session.in_transaction: - raise InvalidOperation("GridFS does not support multi-document transactions") - - -class GridFS: - """An instance of GridFS on top of a single Database.""" - - def __init__(self, database: Database, collection: str = "fs"): - """Create a new instance of :class:`GridFS`. - - Raises :class:`TypeError` if `database` is not an instance of - :class:`~pymongo.database.Database`. - - :param database: database to use - :param collection: root collection to use - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.11 - Running a GridFS operation in a transaction now always raises an - error. GridFS does not support multi-document transactions. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionchanged:: 3.1 - Indexes are only ensured on the first write to the DB. - - .. versionchanged:: 3.0 - `database` must use an acknowledged - :attr:`~pymongo.database.Database.write_concern` - - .. seealso:: The MongoDB documentation on `gridfs `_. - """ - if not isinstance(database, Database): - raise TypeError(f"database must be an instance of Database, not {type(database)}") - - database = _clear_entity_type_registry(database) - - if not database.write_concern.acknowledged: - raise ConfigurationError("database must use acknowledged write_concern") - - self._collection = database[collection] - self._files = self._collection.files - self._chunks = self._collection.chunks - - def new_file(self, **kwargs: Any) -> GridIn: - """Create a new file in GridFS. - - Returns a new :class:`~gridfs.grid_file.GridIn` instance to - which data can be written. Any keyword arguments will be - passed through to :meth:`~gridfs.grid_file.GridIn`. - - If the ``"_id"`` of the file is manually specified, it must - not already exist in GridFS. Otherwise - :class:`~gridfs.errors.FileExists` is raised. - - :param kwargs: keyword arguments for file creation - """ - return GridIn(self._collection, **kwargs) - - def put(self, data: Any, **kwargs: Any) -> Any: - """Put data in GridFS as a new file. 
- - Equivalent to doing:: - - with fs.new_file(**kwargs) as f: - f.write(data) - - `data` can be either an instance of :class:`bytes` or a file-like - object providing a :meth:`read` method. If an `encoding` keyword - argument is passed, `data` can also be a :class:`str` instance, which - will be encoded as `encoding` before being written. Any keyword - arguments will be passed through to the created file - see - :meth:`~gridfs.grid_file.GridIn` for possible arguments. Returns the - ``"_id"`` of the created file. - - If the ``"_id"`` of the file is manually specified, it must - not already exist in GridFS. Otherwise - :class:`~gridfs.errors.FileExists` is raised. - - :param data: data to be written as a file. - :param kwargs: keyword arguments for file creation - - .. versionchanged:: 3.0 - w=0 writes to GridFS are now prohibited. - """ - with GridIn(self._collection, **kwargs) as grid_file: - grid_file.write(data) - return grid_file._id - - def get(self, file_id: Any, session: Optional[ClientSession] = None) -> GridOut: - """Get a file from GridFS by ``"_id"``. - - Returns an instance of :class:`~gridfs.grid_file.GridOut`, - which provides a file-like interface for reading. - - :param file_id: ``"_id"`` of the file to get - :param session: a - :class:`~pymongo.client_session.ClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - gout = GridOut(self._collection, file_id, session=session) - - # Raise NoFile now, instead of on first attribute access. - gout.open() - return gout - - def get_version( - self, - filename: Optional[str] = None, - version: Optional[int] = -1, - session: Optional[ClientSession] = None, - **kwargs: Any, - ) -> GridOut: - """Get a file from GridFS by ``"filename"`` or metadata fields. - - Returns a version of the file in GridFS whose filename matches - `filename` and whose metadata fields match the supplied keyword - arguments, as an instance of :class:`~gridfs.grid_file.GridOut`. - - Version numbering is a convenience atop the GridFS API provided - by MongoDB. If more than one file matches the query (either by - `filename` alone, by metadata fields, or by a combination of - both), then version ``-1`` will be the most recently uploaded - matching file, ``-2`` the second most recently - uploaded, etc. Version ``0`` will be the first version - uploaded, ``1`` the second version, etc. So if three versions - have been uploaded, then version ``0`` is the same as version - ``-3``, version ``1`` is the same as version ``-2``, and - version ``2`` is the same as version ``-1``. - - Raises :class:`~gridfs.errors.NoFile` if no such version of - that file exists. - - :param filename: ``"filename"`` of the file to get, or `None` - :param version: version of the file to get (defaults - to -1, the most recent version uploaded) - :param session: a - :class:`~pymongo.client_session.ClientSession` - :param kwargs: find files by custom metadata. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.1 - ``get_version`` no longer ensures indexes. 
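The version numbering just described, exercised end to end with the
synchronous API (a minimal sketch assuming a local ``mongod`` on the default
port)::

    from pymongo import MongoClient
    import gridfs

    fs = gridfs.GridFS(MongoClient().test)
    for n in range(3):
        fs.put(b"v%d" % n, filename="notes.txt")

    assert fs.get_version("notes.txt", version=0).read() == b"v0"   # oldest
    assert fs.get_version("notes.txt", version=-1).read() == b"v2"  # newest
    assert fs.get_last_version("notes.txt").read() == b"v2"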
-        """
-        query = kwargs
-        if filename is not None:
-            query["filename"] = filename
-
-        _disallow_transactions(session)
-        cursor = self._files.find(query, session=session)
-        if version is None:
-            version = -1
-        if version < 0:
-            skip = abs(version) - 1
-            cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
-        else:
-            cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING)
-        try:
-            doc = next(cursor)
-            return GridOut(self._collection, file_document=doc, session=session)
-        except StopIteration:
-            raise NoFile("no version %d for filename %r" % (version, filename)) from None
-
-    def get_last_version(
-        self,
-        filename: Optional[str] = None,
-        session: Optional[ClientSession] = None,
-        **kwargs: Any,
-    ) -> GridOut:
-        """Get the most recent version of a file in GridFS by ``"filename"``
-        or metadata fields.
-
-        Equivalent to calling :meth:`get_version` with the default
-        `version` (``-1``).
-
-        :param filename: ``"filename"`` of the file to get, or `None`
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-        :param kwargs: find files by custom metadata.
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        return self.get_version(filename=filename, session=session, **kwargs)
-
-    # TODO add optional safe mode for chunk removal?
-    def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None:
-        """Delete a file from GridFS by ``"_id"``.
-
-        Deletes all data belonging to the file with ``"_id"``:
-        `file_id`.
-
-        .. warning:: Any processes/threads reading from the file while
-           this method is executing will likely see an invalid/corrupt
-           file. Care should be taken to avoid concurrent reads to a file
-           while it is being deleted.
-
-        .. note:: Deletes of non-existent files are considered successful
-           since the end result is the same: no file with that _id remains.
-
-        :param file_id: ``"_id"`` of the file to delete
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-
-        .. versionchanged:: 3.1
-           ``delete`` no longer ensures indexes.
-        """
-        _disallow_transactions(session)
-        self._files.delete_one({"_id": file_id}, session=session)
-        self._chunks.delete_many({"files_id": file_id}, session=session)
-
-    def list(self, session: Optional[ClientSession] = None) -> list[str]:
-        """List the names of all files stored in this instance of
-        :class:`GridFS`.
-
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-
-        .. versionchanged:: 3.1
-           ``list`` no longer ensures indexes.
-        """
-        _disallow_transactions(session)
-        # With an index, distinct includes documents with no filename
-        # as None.
-        return [
-            name for name in self._files.distinct("filename", session=session) if name is not None
-        ]
-
-    def find_one(
-        self,
-        filter: Optional[Any] = None,
-        session: Optional[ClientSession] = None,
-        *args: Any,
-        **kwargs: Any,
-    ) -> Optional[GridOut]:
-        """Get a single file from gridfs.
-
-        All arguments to :meth:`find` are also valid arguments for
-        :meth:`find_one`, although any `limit` argument will be
-        ignored. Returns a single :class:`~gridfs.grid_file.GridOut`,
-        or ``None`` if no matching file is found. For example:
-
-        .. code-block:: python
-
-            file = fs.find_one({"filename": "lisa.txt"})
-
-        :param filter: a dictionary specifying
-            the query to be performed OR any other type to be used as
-            the value for a query for ``"_id"`` in the file collection.
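Because a non-mapping ``filter`` is coerced to an ``"_id"`` query, these two
calls are equivalent (``file_id`` assumed to be an existing ``_id``)::

    fs.find_one(file_id)
    fs.find_one({"_id": file_id})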
- :param args: any additional positional arguments are - the same as the arguments to :meth:`find`. - :param session: a - :class:`~pymongo.client_session.ClientSession` - :param kwargs: any additional keyword arguments - are the same as the arguments to :meth:`find`. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - if filter is not None and not isinstance(filter, abc.Mapping): - filter = {"_id": filter} - - _disallow_transactions(session) - for f in self.find(filter, *args, session=session, **kwargs): - return f - - return None - - def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: - """Query GridFS for files. - - Returns a cursor that iterates across files matching - arbitrary queries on the files collection. Can be combined - with other modifiers for additional control. For example:: - - for grid_out in fs.find({"filename": "lisa.txt"}, - no_cursor_timeout=True): - data = grid_out.read() - - would iterate through all versions of "lisa.txt" stored in GridFS. - Note that setting no_cursor_timeout to True may be important to - prevent the cursor from timing out during long multi-file processing - work. - - As another example, the call:: - - most_recent_three = fs.find().sort("uploadDate", -1).limit(3) - - would return a cursor to the three most recently uploaded files - in GridFS. - - Follows a similar interface to - :meth:`~pymongo.collection.Collection.find` - in :class:`~pymongo.collection.Collection`. - - If a :class:`~pymongo.client_session.ClientSession` is passed to - :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances - are associated with that session. - - :param filter: A query document that selects which files - to include in the result set. Can be an empty document to include - all files. - :param skip: the number of files to omit (from - the start of the result set) when returning the results - :param limit: the maximum number of results to - return - :param no_cursor_timeout: if False (the default), any - returned cursor is closed by the server after 10 minutes of - inactivity. If set to True, the returned cursor will never - time out on the server. Care should be taken to ensure that - cursors with no_cursor_timeout turned on are properly closed. - :param sort: a list of (key, direction) pairs - specifying the sort order for this query. See - :meth:`~pymongo.cursor.Cursor.sort` for details. - - Raises :class:`TypeError` if any of the arguments are of - improper type. Returns an instance of - :class:`~gridfs.grid_file.GridOutCursor` - corresponding to this query. - - .. versionchanged:: 3.0 - Removed the read_preference, tag_sets, and - secondary_acceptable_latency_ms options. - .. versionadded:: 2.7 - .. seealso:: The MongoDB documentation on `find `_. - """ - return GridOutCursor(self._collection, *args, **kwargs) - - def exists( - self, - document_or_id: Optional[Any] = None, - session: Optional[ClientSession] = None, - **kwargs: Any, - ) -> bool: - """Check if a file exists in this instance of :class:`GridFS`. - - The file to check for can be specified by the value of its - ``_id`` key, or by passing in a query document. A query - document can be passed in as dictionary, or by using keyword - arguments. 
Thus, the following three calls are equivalent: - - >>> fs.exists(file_id) - >>> fs.exists({"_id": file_id}) - >>> fs.exists(_id=file_id) - - As are the following two calls: - - >>> fs.exists({"filename": "mike.txt"}) - >>> fs.exists(filename="mike.txt") - - And the following two: - - >>> fs.exists({"foo": {"$gt": 12}}) - >>> fs.exists(foo={"$gt": 12}) - - Returns ``True`` if a matching file exists, ``False`` - otherwise. Calls to :meth:`exists` will not automatically - create appropriate indexes; application developers should be - sure to create indexes if needed and as appropriate. - - :param document_or_id: query document, or _id of the - document to check for - :param session: a - :class:`~pymongo.client_session.ClientSession` - :param kwargs: keyword arguments are used as a - query document, if they're present. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - _disallow_transactions(session) - if kwargs: - f = self._files.find_one(kwargs, ["_id"], session=session) - else: - f = self._files.find_one(document_or_id, ["_id"], session=session) - - return f is not None - - -class GridFSBucket: - """An instance of GridFS on top of a single Database.""" - - def __init__( - self, - db: Database, - bucket_name: str = "fs", - chunk_size_bytes: int = DEFAULT_CHUNK_SIZE, - write_concern: Optional[WriteConcern] = None, - read_preference: Optional[_ServerMode] = None, - ) -> None: - """Create a new instance of :class:`GridFSBucket`. - - Raises :exc:`TypeError` if `database` is not an instance of - :class:`~pymongo.database.Database`. - - Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` - is not acknowledged. - - :param database: database to use. - :param bucket_name: The name of the bucket. Defaults to 'fs'. - :param chunk_size_bytes: The chunk size in bytes. Defaults - to 255KB. - :param write_concern: The - :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` - (the default) db.write_concern is used. - :param read_preference: The read preference to use. If - ``None`` (the default) db.read_preference is used. - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.11 - Running a GridFSBucket operation in a transaction now always raises - an error. GridFSBucket does not support multi-document transactions. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionadded:: 3.1 - - .. seealso:: The MongoDB documentation on `gridfs `_. 
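The constructor options compose naturally; for instance, a bucket with a
custom name, larger chunks, and a majority write concern (one possible
configuration, not the only valid one)::

    from pymongo import MongoClient, WriteConcern
    from gridfs import GridFSBucket

    db = MongoClient().test
    fs = GridFSBucket(
        db,
        bucket_name="images",                      # uses images.files / images.chunks
        chunk_size_bytes=1024 * 1024,              # 1 MiB chunks instead of 255 KB
        write_concern=WriteConcern(w="majority"),  # must stay acknowledged
    )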
- """ - if not isinstance(db, Database): - raise TypeError(f"database must be an instance of Database, not {type(db)}") - - db = _clear_entity_type_registry(db) - - wtc = write_concern if write_concern is not None else db.write_concern - if not wtc.acknowledged: - raise ConfigurationError("write concern must be acknowledged") - - self._bucket_name = bucket_name - self._collection = db[bucket_name] - self._chunks: Collection = self._collection.chunks.with_options( - write_concern=write_concern, read_preference=read_preference - ) - - self._files: Collection = self._collection.files.with_options( - write_concern=write_concern, read_preference=read_preference - ) - - self._chunk_size_bytes = chunk_size_bytes - self._timeout = db.client.options.timeout - - def open_upload_stream( - self, - filename: str, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[ClientSession] = None, - ) -> GridIn: - """Opens a Stream that the application can write the contents of the - file to. - - The user must specify the filename, and can choose to add any - additional information in the metadata field of the file document or - modify the chunk size. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - with fs.open_upload_stream( - "test_file", chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) as grid_in: - grid_in.write("data I want to store!") - # uploaded on close - - Returns an instance of :class:`~gridfs.grid_file.GridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param filename: The name of the file to upload. - :param chunk_size_bytes` (options): The number of bytes per chunk of this - file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. - :param metadata: User data for the 'metadata' field of the - files collection document. If not provided the metadata field will - be omitted from the files collection document. - :param session: a - :class:`~pymongo.client_session.ClientSession` - - .. versionchanged:: 3.6 - Added ``session`` parameter. - """ - validate_string("filename", filename) - - opts = { - "filename": filename, - "chunk_size": ( - chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes - ), - } - if metadata is not None: - opts["metadata"] = metadata - - return GridIn(self._collection, session=session, **opts) - - def open_upload_stream_with_id( - self, - file_id: Any, - filename: str, - chunk_size_bytes: Optional[int] = None, - metadata: Optional[Mapping[str, Any]] = None, - session: Optional[ClientSession] = None, - ) -> GridIn: - """Opens a Stream that the application can write the contents of the - file to. - - The user must specify the file id and filename, and can choose to add - any additional information in the metadata field of the file document - or modify the chunk size. - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - with fs.open_upload_stream_with_id( - ObjectId(), - "test_file", - chunk_size_bytes=4, - metadata={"contentType": "text/plain"}) as grid_in: - grid_in.write("data I want to store!") - # uploaded on close - - Returns an instance of :class:`~gridfs.grid_file.GridIn`. - - Raises :exc:`~gridfs.errors.NoFile` if no such version of - that file exists. - Raises :exc:`~ValueError` if `filename` is not a string. - - :param file_id: The id to use for this file. The id must not have - already been used for another file. 
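Violating that uniqueness constraint surfaces as
:class:`~gridfs.errors.FileExists` when the buffered chunks and file document
are flushed. A sketch, with ``fs`` as above::

    from bson.objectid import ObjectId
    from gridfs.errors import FileExists

    fid = ObjectId()
    with fs.open_upload_stream_with_id(fid, "test_file") as grid_in:
        grid_in.write(b"first version")

    try:
        with fs.open_upload_stream_with_id(fid, "test_file") as grid_in:
            grid_in.write(b"second version")  # same _id: rejected at close/flush
    except FileExists:
        pass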
-        :param filename: The name of the file to upload.
-        :param chunk_size_bytes: The number of bytes per chunk of this
-            file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`.
-        :param metadata: User data for the 'metadata' field of the
-            files collection document. If not provided the metadata field will
-            be omitted from the files collection document.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        validate_string("filename", filename)
-
-        opts = {
-            "_id": file_id,
-            "filename": filename,
-            "chunk_size": (
-                chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes
-            ),
-        }
-        if metadata is not None:
-            opts["metadata"] = metadata
-
-        return GridIn(self._collection, session=session, **opts)
-
-    @_csot.apply
-    def upload_from_stream(
-        self,
-        filename: str,
-        source: Any,
-        chunk_size_bytes: Optional[int] = None,
-        metadata: Optional[Mapping[str, Any]] = None,
-        session: Optional[ClientSession] = None,
-    ) -> ObjectId:
-        """Uploads a user file to a GridFS bucket.
-
-        Reads the contents of the user file from `source` and uploads
-        it to the file `filename`. Source can be a string or file-like object.
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          file_id = fs.upload_from_stream(
-              "test_file",
-              b"data I want to store!",
-              chunk_size_bytes=4,
-              metadata={"contentType": "text/plain"})
-
-        Returns the _id of the uploaded file.
-
-        Raises :exc:`ValueError` if `filename` is not a string.
-
-        :param filename: The name of the file to upload.
-        :param source: The source stream of the content to be uploaded. Must be
-            a file-like object that implements :meth:`read` or a string.
-        :param chunk_size_bytes: The number of bytes per chunk of this
-            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
-        :param metadata: User data for the 'metadata' field of the
-            files collection document. If not provided the metadata field will
-            be omitted from the files collection document.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        with self.open_upload_stream(filename, chunk_size_bytes, metadata, session=session) as gin:
-            gin.write(source)
-
-        return cast(ObjectId, gin._id)
-
-    @_csot.apply
-    def upload_from_stream_with_id(
-        self,
-        file_id: Any,
-        filename: str,
-        source: Any,
-        chunk_size_bytes: Optional[int] = None,
-        metadata: Optional[Mapping[str, Any]] = None,
-        session: Optional[ClientSession] = None,
-    ) -> None:
-        """Uploads a user file to a GridFS bucket with a custom file id.
-
-        Reads the contents of the user file from `source` and uploads
-        it to the file `filename`. Source can be a string or file-like object.
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          fs.upload_from_stream_with_id(
-              ObjectId(),
-              "test_file",
-              b"data I want to store!",
-              chunk_size_bytes=4,
-              metadata={"contentType": "text/plain"})
-
-        Raises :exc:`ValueError` if `filename` is not a string.
-
-        :param file_id: The id to use for this file. The id must not have
-            already been used for another file.
-        :param filename: The name of the file to upload.
-        :param source: The source stream of the content to be uploaded. Must be
-            a file-like object that implements :meth:`read` or a string.
-        :param chunk_size_bytes: The number of bytes per chunk of this
-            file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`.
-        :param metadata: User data for the 'metadata' field of the
-            files collection document. If not provided the metadata field will
-            be omitted from the files collection document.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        with self.open_upload_stream_with_id(
-            file_id, filename, chunk_size_bytes, metadata, session=session
-        ) as gin:
-            gin.write(source)
-
-    def open_download_stream(
-        self, file_id: Any, session: Optional[ClientSession] = None
-    ) -> GridOut:
-        """Opens a stream from which the application can read the contents of
-        the stored file specified by file_id.
-
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          # get _id of file to read.
-          file_id = fs.upload_from_stream("test_file", b"data I want to store!")
-          grid_out = fs.open_download_stream(file_id)
-          contents = grid_out.read()
-
-        Returns an instance of :class:`~gridfs.grid_file.GridOut`.
-
-        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
-
-        :param file_id: The _id of the file to be downloaded.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        gout = GridOut(self._collection, file_id, session=session)
-
-        # Raise NoFile now, instead of on first attribute access.
-        gout.open()
-        return gout
-
-    @_csot.apply
-    def download_to_stream(
-        self, file_id: Any, destination: Any, session: Optional[ClientSession] = None
-    ) -> None:
-        """Downloads the contents of the stored file specified by file_id and
-        writes the contents to `destination`.
-
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          # Get _id of file to read
-          file_id = fs.upload_from_stream("test_file", b"data I want to store!")
-          # Get file to write to
-          file = open('myfile', 'wb+')
-          fs.download_to_stream(file_id, file)
-          file.seek(0)
-          contents = file.read()
-
-        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
-
-        :param file_id: The _id of the file to be downloaded.
-        :param destination: a file-like object implementing :meth:`write`.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        with self.open_download_stream(file_id, session=session) as gout:
-            while True:
-                chunk = gout.readchunk()
-                if not len(chunk):
-                    break
-                destination.write(chunk)
-
-    @_csot.apply
-    def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None:
-        """Given a file_id, delete this stored file's files collection document
-        and associated chunks from a GridFS bucket.
-
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          # Get _id of file to delete
-          file_id = fs.upload_from_stream("test_file", b"data I want to store!")
-          fs.delete(file_id)
-
-        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
-
-        :param file_id: The _id of the file to be deleted.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
- """ - _disallow_transactions(session) - res = self._files.delete_one({"_id": file_id}, session=session) - self._chunks.delete_many({"files_id": file_id}, session=session) - if not res.deleted_count: - raise NoFile("no file could be deleted because none matched %s" % file_id) - - @_csot.apply - def delete_by_name(self, filename: str, session: Optional[ClientSession] = None) -> None: - """Given a filename, delete this stored file's files collection document(s) - and associated chunks from a GridFS bucket. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - fs.upload_from_stream("test_file", "data I want to store!") - fs.delete_by_name("test_file") - - Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. - - :param filename: The name of the file to be deleted. - :param session: a :class:`~pymongo.client_session.ClientSession` - - .. versionadded:: 4.12 - """ - _disallow_transactions(session) - files = self._files.find({"filename": filename}, {"_id": 1}, session=session) - file_ids = [file["_id"] for file in files] - res = self._files.delete_many({"_id": {"$in": file_ids}}, session=session) - self._chunks.delete_many({"files_id": {"$in": file_ids}}, session=session) - if not res.deleted_count: - raise NoFile(f"no file could be deleted because none matched filename {filename!r}") - - def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: - """Find and return the files collection documents that match ``filter`` - - Returns a cursor that iterates across files matching - arbitrary queries on the files collection. Can be combined - with other modifiers for additional control. - - For example:: - - for grid_data in fs.find({"filename": "lisa.txt"}, - no_cursor_timeout=True): - data = grid_data.read() - - would iterate through all versions of "lisa.txt" stored in GridFS. - Note that setting no_cursor_timeout to True may be important to - prevent the cursor from timing out during long multi-file processing - work. - - As another example, the call:: - - most_recent_three = fs.find().sort("uploadDate", -1).limit(3) - - would return a cursor to the three most recently uploaded files - in GridFS. - - Follows a similar interface to - :meth:`~pymongo.collection.Collection.find` - in :class:`~pymongo.collection.Collection`. - - If a :class:`~pymongo.client_session.ClientSession` is passed to - :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances - are associated with that session. - - :param filter: Search query. - :param batch_size: The number of documents to return per - batch. - :param limit: The maximum number of documents to return. - :param no_cursor_timeout: The server normally times out idle - cursors after an inactivity period (10 minutes) to prevent excess - memory use. Set this option to True prevent that. - :param skip: The number of documents to skip before - returning. - :param sort: The order by which to sort results. Defaults to - None. - """ - return GridOutCursor(self._collection, *args, **kwargs) - - def open_download_stream_by_name( - self, filename: str, revision: int = -1, session: Optional[ClientSession] = None - ) -> GridOut: - """Opens a Stream from which the application can read the contents of - `filename` and optional `revision`. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - grid_out = fs.open_download_stream_by_name("test_file") - contents = grid_out.read() - - Returns an instance of :class:`~gridfs.grid_file.GridOut`. 
-
-        Raises :exc:`~gridfs.errors.NoFile` if no such version of
-        that file exists.
-
-        Raises :exc:`ValueError` if `filename` is not a string.
-
-        :param filename: The name of the file to read from.
-        :param revision: Which revision (documents with the same
-            filename and different uploadDate) of the file to retrieve.
-            Defaults to -1 (the most recent revision).
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        :Note: Revision numbers are defined as follows:
-
-          - 0 = the original stored file
-          - 1 = the first revision
-          - 2 = the second revision
-          - etc...
-          - -2 = the second most recent revision
-          - -1 = the most recent revision
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        validate_string("filename", filename)
-        query = {"filename": filename}
-        _disallow_transactions(session)
-        cursor = self._files.find(query, session=session)
-        if revision < 0:
-            skip = abs(revision) - 1
-            cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING)
-        else:
-            cursor.limit(-1).skip(revision).sort("uploadDate", ASCENDING)
-        try:
-            grid_file = next(cursor)
-            return GridOut(self._collection, file_document=grid_file, session=session)
-        except StopIteration:
-            raise NoFile("no version %d for filename %r" % (revision, filename)) from None
-
-    @_csot.apply
-    def download_to_stream_by_name(
-        self,
-        filename: str,
-        destination: Any,
-        revision: int = -1,
-        session: Optional[ClientSession] = None,
-    ) -> None:
-        """Write the contents of `filename` (with optional `revision`) to
-        `destination`.
-
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          # Get file to write to
-          file = open('myfile', 'wb')
-          fs.download_to_stream_by_name("test_file", file)
-
-        Raises :exc:`~gridfs.errors.NoFile` if no such version of
-        that file exists.
-
-        Raises :exc:`ValueError` if `filename` is not a string.
-
-        :param filename: The name of the file to read from.
-        :param destination: A file-like object that implements :meth:`write`.
-        :param revision: Which revision (documents with the same
-            filename and different uploadDate) of the file to retrieve.
-            Defaults to -1 (the most recent revision).
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        :Note: Revision numbers are defined as follows:
-
-          - 0 = the original stored file
-          - 1 = the first revision
-          - 2 = the second revision
-          - etc...
-          - -2 = the second most recent revision
-          - -1 = the most recent revision
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
-        """
-        with self.open_download_stream_by_name(filename, revision, session=session) as gout:
-            while True:
-                chunk = gout.readchunk()
-                if not len(chunk):
-                    break
-                destination.write(chunk)
-
-    def rename(
-        self, file_id: Any, new_filename: str, session: Optional[ClientSession] = None
-    ) -> None:
-        """Renames the stored file with the specified file_id.
-
-        For example::
-
-          my_db = MongoClient().test
-          fs = GridFSBucket(my_db)
-          # Get _id of file to rename
-          file_id = fs.upload_from_stream("test_file", b"data I want to store!")
-          fs.rename(file_id, "new_test_name")
-
-        Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.
-
-        :param file_id: The _id of the file to be renamed.
-        :param new_filename: The new name of the file.
-        :param session: a
-            :class:`~pymongo.client_session.ClientSession`
-
-        .. versionchanged:: 3.6
-           Added ``session`` parameter.
- """ - _disallow_transactions(session) - result = self._files.update_one( - {"_id": file_id}, {"$set": {"filename": new_filename}}, session=session - ) - if not result.matched_count: - raise NoFile( - "no files could be renamed %r because none " - "matched file_id %i" % (new_filename, file_id) - ) - - def rename_by_name( - self, filename: str, new_filename: str, session: Optional[ClientSession] = None - ) -> None: - """Renames the stored file with the specified filename. - - For example:: - - my_db = MongoClient().test - fs = GridFSBucket(my_db) - fs.upload_from_stream("test_file", "data I want to store!") - fs.rename_by_name("test_file", "new_test_name") - - Raises :exc:`~gridfs.errors.NoFile` if no file with the given filename exists. - - :param filename: The filename of the file to be renamed. - :param new_filename: The new name of the file. - :param session: a :class:`~pymongo.client_session.ClientSession` - - .. versionadded:: 4.12 - """ - _disallow_transactions(session) - result = self._files.update_many( - {"filename": filename}, {"$set": {"filename": new_filename}}, session=session - ) - if not result.matched_count: - raise NoFile( - f"no files could be renamed {new_filename!r} because none matched filename {filename!r}" - ) - - -class GridIn: - """Class to write data to GridFS.""" - - def __init__( - self, - root_collection: Collection, - session: Optional[ClientSession] = None, - **kwargs: Any, - ) -> None: - """Write a file to GridFS - - Application developers should generally not need to - instantiate this class directly - instead see the methods - provided by :class:`~gridfs.GridFS`. - - Raises :class:`TypeError` if `root_collection` is not an - instance of :class:`~pymongo.collection.Collection`. - - Any of the file level options specified in the `GridFS Spec - `_ may be passed as - keyword arguments. Any additional keyword arguments will be - set as additional fields on the file document. Valid keyword - arguments include: - - - ``"_id"``: unique ID for this file (default: - :class:`~bson.objectid.ObjectId`) - this ``"_id"`` must - not have already been used for another file - - - ``"filename"``: human name for the file - - - ``"contentType"`` or ``"content_type"``: valid mime-type - for the file - - - ``"chunkSize"`` or ``"chunk_size"``: size of each of the - chunks, in bytes (default: 255 kb) - - - ``"encoding"``: encoding used for this file. Any :class:`str` - that is written to the file will be converted to :class:`bytes`. - - :param root_collection: root collection to write to - :param session: a - :class:`~pymongo.client_session.ClientSession` to use for all - commands - :param kwargs: Any: file level options (see above) - - .. versionchanged:: 4.0 - Removed the `disable_md5` parameter. See - :ref:`removed-gridfs-checksum` for details. - - .. versionchanged:: 3.7 - Added the `disable_md5` parameter. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. 
versionchanged:: 3.0 - `root_collection` must use an acknowledged - :attr:`~pymongo.collection.Collection.write_concern` - """ - if not isinstance(root_collection, Collection): - raise TypeError( - f"root_collection must be an instance of Collection, not {type(root_collection)}" - ) - - if not root_collection.write_concern.acknowledged: - raise ConfigurationError("root_collection must use acknowledged write_concern") - _disallow_transactions(session) - - # Handle alternative naming - if "content_type" in kwargs: - kwargs["contentType"] = kwargs.pop("content_type") - if "chunk_size" in kwargs: - kwargs["chunkSize"] = kwargs.pop("chunk_size") - - coll = _clear_entity_type_registry(root_collection, read_preference=ReadPreference.PRIMARY) - - # Defaults - kwargs["_id"] = kwargs.get("_id", ObjectId()) - kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE) - object.__setattr__(self, "_session", session) - object.__setattr__(self, "_coll", coll) - object.__setattr__(self, "_chunks", coll.chunks) - object.__setattr__(self, "_file", kwargs) - object.__setattr__(self, "_buffer", io.BytesIO()) - object.__setattr__(self, "_position", 0) - object.__setattr__(self, "_chunk_number", 0) - object.__setattr__(self, "_closed", False) - object.__setattr__(self, "_ensured_index", False) - object.__setattr__(self, "_buffered_docs", []) - object.__setattr__(self, "_buffered_docs_size", 0) - - def _create_index(self, collection: Collection, index_key: Any, unique: bool) -> None: - doc = collection.find_one(projection={"_id": 1}, session=self._session) - if doc is None: - try: - index_keys = [ - index_spec["key"] - for index_spec in collection.list_indexes(session=self._session) - ] - except OperationFailure: - index_keys = [] - if index_key not in index_keys: - collection.create_index(index_key.items(), unique=unique, session=self._session) - - def _ensure_indexes(self) -> None: - if not object.__getattribute__(self, "_ensured_index"): - _disallow_transactions(self._session) - self._create_index(self._coll.files, _F_INDEX, False) - self._create_index(self._coll.chunks, _C_INDEX, True) - object.__setattr__(self, "_ensured_index", True) - - def abort(self) -> None: - """Remove all chunks/files that may have been uploaded and close.""" - self._coll.chunks.delete_many({"files_id": self._file["_id"]}, session=self._session) - self._coll.files.delete_one({"_id": self._file["_id"]}, session=self._session) - object.__setattr__(self, "_closed", True) - - @property - def closed(self) -> bool: - """Is this file closed?""" - return self._closed - - _id: Any = _grid_in_property("_id", "The ``'_id'`` value for this file.", read_only=True) - filename: Optional[str] = _grid_in_property("filename", "Name of this file.") - name: Optional[str] = _grid_in_property("filename", "Alias for `filename`.") - content_type: Optional[str] = _grid_in_property( - "contentType", "DEPRECATED, will be removed in PyMongo 5.0. Mime-type for this file." - ) - length: int = _grid_in_property("length", "Length (in bytes) of this file.", closed_only=True) - chunk_size: int = _grid_in_property("chunkSize", "Chunk size for this file.", read_only=True) - upload_date: datetime.datetime = _grid_in_property( - "uploadDate", "Date that this file was uploaded.", closed_only=True - ) - md5: Optional[str] = _grid_in_property( - "md5", - "DEPRECATED, will be removed in PyMongo 5.0. 
MD5 of the contents of this file if an md5 sum was created.", - closed_only=True, - ) - - _buffer: io.BytesIO - _closed: bool - _buffered_docs: list[dict[str, Any]] - _buffered_docs_size: int - - def __getattr__(self, name: str) -> Any: - if name == "_coll": - return object.__getattribute__(self, name) - elif name in self._file: - return self._file[name] - raise AttributeError("GridIn object has no attribute '%s'" % name) - - def __setattr__(self, name: str, value: Any) -> None: - # For properties of this instance like _buffer, or descriptors set on - # the class like filename, use regular __setattr__ - if name in self.__dict__ or name in self.__class__.__dict__: - object.__setattr__(self, name, value) - else: - # All other attributes are part of the document in db.fs.files. - # Store them to be sent to server on close() or if closed, send - # them now. - self._file[name] = value - if self._closed: - if _IS_SYNC: - self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {name: value}}) - else: - raise AttributeError( - "GridIn does not support __setattr__ after being closed(). Set the attribute before closing the file or use GridIn.set() instead" - ) - - def set(self, name: str, value: Any) -> None: - self._file[name] = value - if self._closed: - self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {name: value}}) - - def _flush_data(self, data: Any, force: bool = False) -> None: - """Flush `data` to a chunk.""" - self._ensure_indexes() - assert len(data) <= self.chunk_size - if data: - self._buffered_docs.append( - {"files_id": self._file["_id"], "n": self._chunk_number, "data": data} - ) - self._buffered_docs_size += len(data) + _CHUNK_OVERHEAD - if not self._buffered_docs: - return - # Limit to 100,000 chunks or 32MB (+1 chunk) of data. - if ( - force - or self._buffered_docs_size >= _UPLOAD_BUFFER_SIZE - or len(self._buffered_docs) >= _UPLOAD_BUFFER_CHUNKS - ): - try: - self._chunks.insert_many(self._buffered_docs, session=self._session) - except BulkWriteError as exc: - # For backwards compatibility, raise an insert_one style exception. - write_errors = exc.details["writeErrors"] - for err in write_errors: - if err.get("code") in (11000, 11001, 12582): # Duplicate key errors - self._raise_file_exists(self._file["_id"]) - result = {"writeErrors": write_errors} - wces = exc.details["writeConcernErrors"] - if wces: - result["writeConcernError"] = wces[-1] - _check_write_command_response(result) - raise - self._buffered_docs = [] - self._buffered_docs_size = 0 - self._chunk_number += 1 - self._position += len(data) - - def _flush_buffer(self, force: bool = False) -> None: - """Flush the buffer contents out to a chunk.""" - self._flush_data(self._buffer.getvalue(), force=force) - self._buffer.close() - self._buffer = io.BytesIO() - - def _flush(self) -> Any: - """Flush the file to the database.""" - try: - self._flush_buffer(force=True) - # The GridFS spec says length SHOULD be an Int64. - self._file["length"] = Int64(self._position) - self._file["uploadDate"] = datetime.datetime.now(tz=datetime.timezone.utc) - - return self._coll.files.insert_one(self._file, session=self._session) - except DuplicateKeyError: - self._raise_file_exists(self._id) - - def _raise_file_exists(self, file_id: Any) -> NoReturn: - """Raise a FileExists exception for the given file_id.""" - raise FileExists("file with _id %r already exists" % file_id) - - def close(self) -> None: - """Flush the file and close it. - - A closed file cannot be written any more. 
Calling - :meth:`close` more than once is allowed. - """ - if not self._closed: - self._flush() - object.__setattr__(self, "_closed", True) - - def read(self, size: int = -1) -> NoReturn: - raise io.UnsupportedOperation("read") - - def readable(self) -> bool: - return False - - def seekable(self) -> bool: - return False - - def write(self, data: Any) -> None: - """Write data to the file. There is no return value. - - `data` can be either a string of bytes or a file-like object - (implementing :meth:`read`). If the file has an - :attr:`encoding` attribute, `data` can also be a - :class:`str` instance, which will be encoded as - :attr:`encoding` before being written. - - Due to buffering, the data may not actually be written to the - database until the :meth:`close` method is called. Raises - :class:`ValueError` if this file is already closed. Raises - :class:`TypeError` if `data` is not an instance of - :class:`bytes`, a file-like object, or an instance of :class:`str`. - Unicode data is only allowed if the file has an :attr:`encoding` - attribute. - - :param data: string of bytes or file-like object to be written - to the file - """ - if self._closed: - raise ValueError("cannot write to a closed file") - - try: - # file-like - read = data.read - except AttributeError: - # string - if not isinstance(data, (str, bytes)): - raise TypeError("can only write strings or file-like objects") from None - if isinstance(data, str): - try: - data = data.encode(self.encoding) - except AttributeError: - raise TypeError( - "must specify an encoding for file in order to write str" - ) from None - read = io.BytesIO(data).read - - if inspect.iscoroutinefunction(read): - self._write_async(read) - else: - if self._buffer.tell() > 0: - # Make sure to flush only when _buffer is complete - space = self.chunk_size - self._buffer.tell() - if space: - try: - to_write = read(space) - except BaseException: - self.abort() - raise - self._buffer.write(to_write) - if len(to_write) < space: - return # EOF or incomplete - self._flush_buffer() - to_write = read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: - self._flush_data(to_write) - to_write = read(self.chunk_size) - self._buffer.write(to_write) - - def _write_async(self, read: Any) -> None: - if self._buffer.tell() > 0: - # Make sure to flush only when _buffer is complete - space = self.chunk_size - self._buffer.tell() - if space: - try: - to_write = read(space) - except BaseException: - self.abort() - raise - self._buffer.write(to_write) - if len(to_write) < space: - return # EOF or incomplete - self._flush_buffer() - to_write = read(self.chunk_size) - while to_write and len(to_write) == self.chunk_size: - self._flush_data(to_write) - to_write = read(self.chunk_size) - self._buffer.write(to_write) - - def writelines(self, sequence: Iterable[Any]) -> None: - """Write a sequence of strings to the file. - - Does not add separators. - """ - for line in sequence: - self.write(line) - - def writeable(self) -> bool: - return True - - def __enter__(self) -> GridIn: - """Support for the context manager protocol.""" - return self - - def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: - """Support for the context manager protocol. - - Close the file if no exceptions occur and allow exceptions to propagate. - """ - if exc_type is None: - # No exceptions happened. - self.close() - else: - # Something happened, at minimum mark as closed. 
- object.__setattr__(self, "_closed", True) - - # propagate exceptions - return False - - -GRIDOUT_BASE_CLASS = io.IOBase if _IS_SYNC else object # type: Any - - -class GridOut(GRIDOUT_BASE_CLASS): # type: ignore - - """Class to read data out of GridFS.""" - - def __init__( - self, - root_collection: Collection, - file_id: Optional[int] = None, - file_document: Optional[Any] = None, - session: Optional[ClientSession] = None, - ) -> None: - """Read a file from GridFS - - Application developers should generally not need to - instantiate this class directly - instead see the methods - provided by :class:`~gridfs.GridFS`. - - Either `file_id` or `file_document` must be specified, - `file_document` will be given priority if present. Raises - :class:`TypeError` if `root_collection` is not an instance of - :class:`~pymongo.collection.Collection`. - - :param root_collection: root collection to read from - :param file_id: value of ``"_id"`` for the file to read - :param file_document: file document from - `root_collection.files` - :param session: a - :class:`~pymongo.client_session.ClientSession` to use for all - commands - - .. versionchanged:: 3.8 - For better performance and to better follow the GridFS spec, - :class:`GridOut` now uses a single cursor to read all the chunks in - the file. - - .. versionchanged:: 3.6 - Added ``session`` parameter. - - .. versionchanged:: 3.0 - Creating a GridOut does not immediately retrieve the file metadata - from the server. Metadata is fetched when first needed. - """ - if not isinstance(root_collection, Collection): - raise TypeError( - f"root_collection must be an instance of Collection, not {type(root_collection)}" - ) - _disallow_transactions(session) - - root_collection = _clear_entity_type_registry(root_collection) - - super().__init__() - - self._chunks = root_collection.chunks - self._files = root_collection.files - self._file_id = file_id - self._buffer = EMPTY - # Start position within the current buffered chunk. - self._buffer_pos = 0 - self._chunk_iter = None - # Position within the total file. - self._position = 0 - self._file = file_document - self._session = session - if not _IS_SYNC: - self.closed = False - - _id: Any = _grid_out_property("_id", "The ``'_id'`` value for this file.") - filename: str = _grid_out_property("filename", "Name of this file.") - name: str = _grid_out_property("filename", "Alias for `filename`.") - content_type: Optional[str] = _grid_out_property( - "contentType", "DEPRECATED, will be removed in PyMongo 5.0. Mime-type for this file." - ) - length: int = _grid_out_property("length", "Length (in bytes) of this file.") - chunk_size: int = _grid_out_property("chunkSize", "Chunk size for this file.") - upload_date: datetime.datetime = _grid_out_property( - "uploadDate", "Date that this file was first uploaded." - ) - aliases: Optional[list[str]] = _grid_out_property( - "aliases", "DEPRECATED, will be removed in PyMongo 5.0. List of aliases for this file." - ) - metadata: Optional[Mapping[str, Any]] = _grid_out_property( - "metadata", "Metadata attached to this file." - ) - md5: Optional[str] = _grid_out_property( - "md5", - "DEPRECATED, will be removed in PyMongo 5.0. 
MD5 of the contents of this file if an md5 sum was created.", - ) - - _file: Any - _chunk_iter: Any - - if not _IS_SYNC: - closed: bool - - def __next__(self) -> bytes: - line = self.readline() - if line: - return line - raise StopIteration() - - def to_list(self) -> list[bytes]: - return [x for x in self] # noqa: C416, RUF100 - - def readline(self, size: int = -1) -> bytes: - """Read one line or up to `size` bytes from the file. - - :param size: the maximum number of bytes to read - """ - return self._read_size_or_line(size=size, line=True) - - def readlines(self, size: int = -1) -> list[bytes]: - """Read one line or up to `size` bytes from the file. - - :param size: the maximum number of bytes to read - """ - self.open() - lines = [] - remainder = int(self.length) - self._position - bytes_read = 0 - while remainder > 0: - line = self._read_size_or_line(line=True) - bytes_read += len(line) - lines.append(line) - remainder = int(self.length) - self._position - if 0 < size < bytes_read: - break - - return lines - - def open(self) -> None: - if not self._file: - _disallow_transactions(self._session) - self._file = self._files.find_one({"_id": self._file_id}, session=self._session) - if not self._file: - raise NoFile( - f"no file in gridfs collection {self._files!r} with _id {self._file_id!r}" - ) - - def __getattr__(self, name: str) -> Any: - if _IS_SYNC: - self.open() # type: ignore[unused-coroutine] - elif not self._file: - raise InvalidOperation( - "You must call GridOut.open() before accessing the %s property" % name - ) - if name in self._file: - return self._file[name] - raise AttributeError("GridOut object has no attribute '%s'" % name) - - def readable(self) -> bool: - return True - - def readchunk(self) -> bytes: - """Reads a chunk at a time. If the current position is within a - chunk the remainder of the chunk is returned. - """ - self.open() - received = len(self._buffer) - self._buffer_pos - chunk_data = EMPTY - chunk_size = int(self.chunk_size) - - if received > 0: - chunk_data = self._buffer[self._buffer_pos :] - elif self._position < int(self.length): - chunk_number = int((received + self._position) / chunk_size) - if self._chunk_iter is None: - self._chunk_iter = GridOutChunkIterator( - self, self._chunks, self._session, chunk_number - ) - - chunk = self._chunk_iter.next() - chunk_data = chunk["data"][self._position % chunk_size :] - - if not chunk_data: - raise CorruptGridFile("truncated chunk") - - self._position += len(chunk_data) - self._buffer = EMPTY - self._buffer_pos = 0 - return chunk_data - - def _read_size_or_line(self, size: int = -1, line: bool = False) -> bytes: - """Internal read() and readline() helper.""" - self.open() - remainder = int(self.length) - self._position - if size < 0 or size > remainder: - size = remainder - - if size == 0: - return EMPTY - - received = 0 - data = [] - while received < size: - needed = size - received - if self._buffer: - # Optimization: Read the buffer with zero byte copies. - buf = self._buffer - chunk_start = self._buffer_pos - chunk_data = memoryview(buf)[self._buffer_pos :] - self._buffer = EMPTY - self._buffer_pos = 0 - self._position += len(chunk_data) - else: - buf = self.readchunk() - chunk_start = 0 - chunk_data = memoryview(buf) - if line: - pos = buf.find(NEWLN, chunk_start, chunk_start + needed) - chunk_start - if pos >= 0: - # Decrease size to exit the loop. 
- size = received + pos + 1 - needed = pos + 1 - if len(chunk_data) > needed: - data.append(chunk_data[:needed]) - # Optimization: Save the buffer with zero byte copies. - self._buffer = buf - self._buffer_pos = chunk_start + needed - self._position -= len(self._buffer) - self._buffer_pos - else: - data.append(chunk_data) - received += len(chunk_data) - - # Detect extra chunks after reading the entire file. - if size == remainder and self._chunk_iter: - try: - self._chunk_iter.next() - except StopIteration: - pass - - return b"".join(data) - - def read(self, size: int = -1) -> bytes: - """Read at most `size` bytes from the file (less if there - isn't enough data). - - The bytes are returned as an instance of :class:`bytes` - If `size` is negative or omitted all data is read. - - :param size: the number of bytes to read - - .. versionchanged:: 3.8 - This method now only checks for extra chunks after reading the - entire file. Previously, this method would check for extra chunks - on every call. - """ - return self._read_size_or_line(size=size) - - def tell(self) -> int: - """Return the current position of this file.""" - return self._position - - def seek(self, pos: int, whence: int = _SEEK_SET) -> int: - """Set the current position of this file. - - :param pos: the position (or offset if using relative - positioning) to seek to - :param whence: where to seek - from. :attr:`os.SEEK_SET` (``0``) for absolute file - positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative - to the current position, :attr:`os.SEEK_END` (``2``) to - seek relative to the file's end. - - .. versionchanged:: 4.1 - The method now returns the new position in the file, to - conform to the behavior of :meth:`io.IOBase.seek`. - """ - if whence == _SEEK_SET: - new_pos = pos - elif whence == _SEEK_CUR: - new_pos = self._position + pos - elif whence == _SEEK_END: - new_pos = int(self.length) + pos - else: - raise OSError(22, "Invalid value for `whence`") - - if new_pos < 0: - raise OSError(22, "Invalid value for `pos` - must be positive") - - # Optimization, continue using the same buffer and chunk iterator. - if new_pos == self._position: - return new_pos - - self._position = new_pos - self._buffer = EMPTY - self._buffer_pos = 0 - if self._chunk_iter: - self._chunk_iter.close() - self._chunk_iter = None - return new_pos - - def seekable(self) -> bool: - return True - - def __iter__(self) -> GridOut: - """Return an iterator over all of this file's data. - - The iterator will return lines (delimited by ``b'\\n'``) of - :class:`bytes`. This can be useful when serving files - using a webserver that handles such an iterator efficiently. - - .. versionchanged:: 3.8 - The iterator now raises :class:`CorruptGridFile` when encountering - any truncated, missing, or extra chunk in a file. The previous - behavior was to only raise :class:`CorruptGridFile` on a missing - chunk. - - .. versionchanged:: 4.0 - The iterator now iterates over *lines* in the file, instead - of chunks, to conform to the base class :py:class:`io.IOBase`. - Use :meth:`GridOut.readchunk` to read chunk by chunk instead - of line by line. 
- """ - return self - - def close(self) -> None: - """Make GridOut more generically file-like.""" - if self._chunk_iter: - self._chunk_iter.close() - self._chunk_iter = None - if _IS_SYNC: - super().close() - else: - self.closed = True - - def write(self, value: Any) -> NoReturn: - raise io.UnsupportedOperation("write") - - def writelines(self, lines: Any) -> NoReturn: - raise io.UnsupportedOperation("writelines") - - def writable(self) -> bool: - return False - - def __enter__(self) -> GridOut: - """Makes it possible to use :class:`GridOut` files - with the async context manager protocol. - """ - return self - - def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: - """Makes it possible to use :class:`GridOut` files - with the async context manager protocol. - """ - self.close() - return False - - def fileno(self) -> NoReturn: - raise io.UnsupportedOperation("fileno") - - def flush(self) -> None: - # GridOut is read-only, so flush does nothing. - pass - - def isatty(self) -> bool: - return False - - def truncate(self, size: Optional[int] = None) -> NoReturn: - # See https://docs.python.org/3/library/io.html#io.IOBase.writable - # for why truncate has to raise. - raise io.UnsupportedOperation("truncate") - - # Override IOBase.__del__ otherwise it will lead to __getattr__ on - # __IOBase_closed which calls _ensure_file and potentially performs I/O. - # We cannot do I/O in __del__ since it can lead to a deadlock. - def __del__(self) -> None: - pass - - -class GridOutChunkIterator: - """Iterates over a file's chunks using a single cursor. - - Raises CorruptGridFile when encountering any truncated, missing, or extra - chunk in a file. - """ - - def __init__( - self, - grid_out: GridOut, - chunks: Collection, - session: Optional[ClientSession], - next_chunk: Any, - ) -> None: - self._id = grid_out._id - self._chunk_size = int(grid_out.chunk_size) - self._length = int(grid_out.length) - self._chunks = chunks - self._session = session - self._next_chunk = next_chunk - self._num_chunks = math.ceil(float(self._length) / self._chunk_size) - self._cursor = None - - _cursor: Optional[Cursor] - - def expected_chunk_length(self, chunk_n: int) -> int: - if chunk_n < self._num_chunks - 1: - return self._chunk_size - return self._length - (self._chunk_size * (self._num_chunks - 1)) - - def __iter__(self) -> GridOutChunkIterator: - return self - - def _create_cursor(self) -> None: - filter = {"files_id": self._id} - if self._next_chunk > 0: - filter["n"] = {"$gte": self._next_chunk} - _disallow_transactions(self._session) - self._cursor = self._chunks.find(filter, sort=[("n", 1)], session=self._session) - - def _next_with_retry(self) -> Mapping[str, Any]: - """Return the next chunk and retry once on CursorNotFound. - - We retry on CursorNotFound to maintain backwards compatibility in - cases where two calls to read occur more than 10 minutes apart (the - server's default cursor timeout). 
- """ - if self._cursor is None: - self._create_cursor() - assert self._cursor is not None - try: - return self._cursor.next() - except CursorNotFound: - self._cursor.close() - self._create_cursor() - return self._cursor.next() - - def next(self) -> Mapping[str, Any]: - try: - chunk = self._next_with_retry() - except StopIteration: - if self._next_chunk >= self._num_chunks: - raise - raise CorruptGridFile("no chunk #%d" % self._next_chunk) from None - - if chunk["n"] != self._next_chunk: - self.close() - raise CorruptGridFile( - "Missing chunk: expected chunk #%d but found " - "chunk with n=%d" % (self._next_chunk, chunk["n"]) - ) - - if chunk["n"] >= self._num_chunks: - # According to spec, ignore extra chunks if they are empty. - if len(chunk["data"]): - self.close() - raise CorruptGridFile( - "Extra chunk found: expected %d chunks but found " - "chunk with n=%d" % (self._num_chunks, chunk["n"]) - ) - - expected_length = self.expected_chunk_length(chunk["n"]) - if len(chunk["data"]) != expected_length: - self.close() - raise CorruptGridFile( - "truncated chunk #%d: expected chunk length to be %d but " - "found chunk with length %d" % (chunk["n"], expected_length, len(chunk["data"])) - ) - - self._next_chunk += 1 - return chunk - - __next__ = next - - def close(self) -> None: - if self._cursor: - self._cursor.close() - self._cursor = None - - -class GridOutIterator: - def __init__(self, grid_out: GridOut, chunks: Collection, session: ClientSession): - self._chunk_iter = GridOutChunkIterator(grid_out, chunks, session, 0) - - def __iter__(self) -> GridOutIterator: - return self - - def next(self) -> bytes: - chunk = self._chunk_iter.next() - return bytes(chunk["data"]) - - __next__ = next - - -class GridOutCursor(Cursor): - """A cursor / iterator for returning GridOut objects as the result - of an arbitrary query against the GridFS files collection. - """ - - def __init__( - self, - collection: Collection, - filter: Optional[Mapping[str, Any]] = None, - skip: int = 0, - limit: int = 0, - no_cursor_timeout: bool = False, - sort: Optional[Any] = None, - batch_size: int = 0, - session: Optional[ClientSession] = None, - ) -> None: - """Create a new cursor, similar to the normal - :class:`~pymongo.cursor.Cursor`. - - Should not be called directly by application developers - see - the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead. - - .. versionadded 2.7 - - .. seealso:: The MongoDB documentation on `cursors `_. - """ - _disallow_transactions(session) - collection = _clear_entity_type_registry(collection) - - # Hold on to the base "fs" collection to create GridOut objects later. 
- self._root_collection = collection - - super().__init__( - collection.files, - filter, - skip=skip, - limit=limit, - no_cursor_timeout=no_cursor_timeout, - sort=sort, - batch_size=batch_size, - session=session, - ) - - def next(self) -> GridOut: - """Get next GridOut object from cursor.""" - _disallow_transactions(self.session) - next_file = super().next() - return GridOut(self._root_collection, file_document=next_file, session=self.session) - - def to_list(self, length: Optional[int] = None) -> list[GridOut]: - """Convert the cursor to a list.""" - if length is None: - return [x for x in self] # noqa: C416,RUF100 - if length < 1: - raise ValueError("to_list() length must be greater than 0") - ret = [] - for _ in range(length): - ret.append(self.next()) - return ret - - __next__ = next - - def add_option(self, *args: Any, **kwargs: Any) -> NoReturn: - raise NotImplementedError("Method does not exist for GridOutCursor") - - def remove_option(self, *args: Any, **kwargs: Any) -> NoReturn: - raise NotImplementedError("Method does not exist for GridOutCursor") - - def _clone_base(self, session: Optional[ClientSession]) -> GridOutCursor: - """Creates an empty GridOutCursor for information to be copied into.""" - return GridOutCursor(self._root_collection, session=session) diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA deleted file mode 100644 index 8a2f6390..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/METADATA +++ /dev/null @@ -1,202 +0,0 @@ -Metadata-Version: 2.4 -Name: h11 -Version: 0.16.0 -Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 -Home-page: https://github.com/python-hyper/h11 -Author: Nathaniel J. Smith -Author-email: njs@pobox.com -License: MIT -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: System :: Networking -Requires-Python: >=3.8 -License-File: LICENSE.txt -Dynamic: author -Dynamic: author-email -Dynamic: classifier -Dynamic: description -Dynamic: home-page -Dynamic: license -Dynamic: license-file -Dynamic: requires-python -Dynamic: summary - -h11 -=== - -.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master - :target: https://travis-ci.org/python-hyper/h11 - :alt: Automated test status - -.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg - :target: https://codecov.io/gh/python-hyper/h11 - :alt: Test coverage - -.. 
image:: https://readthedocs.org/projects/h11/badge/?version=latest - :target: http://h11.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - -This is a little HTTP/1.1 library written from scratch in Python, -heavily inspired by `hyper-h2 `_. - -It's a "bring-your-own-I/O" library; h11 contains no IO code -whatsoever. This means you can hook h11 up to your favorite network -API, and that could be anything you want: synchronous, threaded, -asynchronous, or your own implementation of `RFC 6214 -`_ -- h11 won't judge you. -(Compare this to the current state of the art, where every time a `new -network API `_ comes along then someone -gets to start over reimplementing the entire HTTP protocol from -scratch.) Cory Benfield made an `excellent blog post describing the -benefits of this approach -`_, or if you like video -then here's his `PyCon 2016 talk on the same theme -`_. - -This also means that h11 is not immediately useful out of the box: -it's a toolkit for building programs that speak HTTP, not something -that could directly replace ``requests`` or ``twisted.web`` or -whatever. But h11 makes it much easier to implement something like -``requests`` or ``twisted.web``. - -At a high level, working with h11 goes like this: - -1) First, create an ``h11.Connection`` object to track the state of a - single HTTP/1.1 connection. - -2) When you read data off the network, pass it to - ``conn.receive_data(...)``; you'll get back a list of objects - representing high-level HTTP "events". - -3) When you want to send a high-level HTTP event, create the - corresponding "event" object and pass it to ``conn.send(...)``; - this will give you back some bytes that you can then push out - through the network. - -For example, a client might instantiate and then send a -``h11.Request`` object, then zero or more ``h11.Data`` objects for the -request body (e.g., if this is a POST), and then a -``h11.EndOfMessage`` to indicate the end of the message. Then the -server would then send back a ``h11.Response``, some ``h11.Data``, and -its own ``h11.EndOfMessage``. If either side violates the protocol, -you'll get a ``h11.ProtocolError`` exception. - -h11 is suitable for implementing both servers and clients, and has a -pleasantly symmetric API: the events you send as a client are exactly -the ones that you receive as a server and vice-versa. - -`Here's an example of a tiny HTTP client -`_ - -It also has `a fine manual `_. - -FAQ ---- - -*Whyyyyy?* - -I wanted to play with HTTP in `Curio -`__ and `Trio -`__, which at the time didn't have any -HTTP libraries. So I thought, no big deal, Python has, like, a dozen -different implementations of HTTP, surely I can find one that's -reusable. I didn't find one, but I did find Cory's call-to-arms -blog-post. So I figured, well, fine, if I have to implement HTTP from -scratch, at least I can make sure no-one *else* has to ever again. - -*Should I use it?* - -Maybe. You should be aware that it's a very young project. But, it's -feature complete and has an exhaustive test-suite and complete docs, -so the next step is for people to try using it and see how it goes -:-). If you do then please let us know -- if nothing else we'll want -to talk to you before making any incompatible changes! - -*What are the features/limitations?* - -Roughly speaking, it's trying to be a robust, complete, and non-hacky -implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: -HTTP/1.1 Message Syntax and Routing -`_. 
That is, it mostly focuses on -implementing HTTP at the level of taking bytes on and off the wire, -and the headers related to that, and tries to be anal about spec -conformance. It doesn't know about higher-level concerns like URL -routing, conditional GETs, cross-origin cookie policies, or content -negotiation. But it does know how to take care of framing, -cross-version differences in keep-alive handling, and the "obsolete -line folding" rule, so you can focus your energies on the hard / -interesting parts for your application, and it tries to support the -full specification in the sense that any useful HTTP/1.1 conformant -application should be able to use h11. - -It's pure Python, and has no dependencies outside of the standard -library. - -It has a test suite with 100.0% coverage for both statements and -branches. - -Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. -The last Python 2-compatible version was h11 0.11.x. -(Originally it had a Cython wrapper for `http-parser -`_ and a beautiful nested state -machine implemented with ``yield from`` to postprocess the output. But -I had to take these out -- the new *parser* needs fewer lines-of-code -than the old *parser wrapper*, is written in pure Python, uses no -exotic language syntax, and has more features. It's sad, really; that -old state machine was really slick. I just need a few sentences here -to mourn that.) - -I don't know how fast it is. I haven't benchmarked or profiled it yet, -so it's probably got a few pointless hot spots, and I've been trying -to err on the side of simplicity and robustness instead of -micro-optimization. But at the architectural level I tried hard to -avoid fundamentally bad decisions, e.g., I believe that all the -parsing algorithms remain linear-time even in the face of pathological -input like slowloris, and there are no byte-by-byte loops. (I also -believe that it maintains bounded memory usage in the face of -arbitrary/pathological input.) - -The whole library is ~800 lines-of-code. You can read and understand -the whole thing in less than an hour. Most of the energy invested in -this so far has been spent on trying to keep things simple by -minimizing special-cases and ad hoc state manipulation; even though it -is now quite small and simple, I'm still annoyed that I haven't -figured out how to make it even smaller and simpler. (Unfortunately, -HTTP does not lend itself to simplicity.) - -The API is ~feature complete and I don't expect the general outlines -to change much, but you can't judge an API's ergonomics until you -actually document and use it, so I'd expect some changes in the -details. - -*How do I try it?* - -.. code-block:: sh - - $ pip install h11 - $ git clone git@github.com:python-hyper/h11 - $ cd h11/examples - $ python basic-client.py - -and go from there. - -*License?* - -MIT - -*Code of conduct?* - -Contributors are requested to follow our `code of conduct -`_ in -all project spaces. 
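-
-As a concrete (unofficial) illustration of the three-step flow described
-above, a minimal blocking-socket client could look like this --
-``example.com``, port 80, and the 4096-byte read size are arbitrary
-choices, not part of h11's API:
-
-.. code-block:: python
-
-   import socket
-
-   import h11
-
-   conn = h11.Connection(our_role=h11.CLIENT)
-   sock = socket.create_connection(("example.com", 80))
-
-   # Sending: turn high-level events into bytes, then do the I/O yourself.
-   request = h11.Request(
-       method="GET",
-       target="/",
-       headers=[("Host", "example.com"), ("Connection", "close")],
-   )
-   sock.sendall(conn.send(request))
-   sock.sendall(conn.send(h11.EndOfMessage()))
-
-   # Receiving: feed raw bytes in, pull high-level events back out.
-   while True:
-       event = conn.next_event()
-       if event is h11.NEED_DATA:
-           conn.receive_data(sock.recv(4096))
-       elif isinstance(event, h11.EndOfMessage):
-           break
-       else:
-           print(type(event).__name__)  # Response, then Data chunk(s)
-
-   sock.close()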
diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD deleted file mode 100644 index a8f8e63f..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/RECORD +++ /dev/null @@ -1,29 +0,0 @@ -h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 -h11-0.16.0.dist-info/RECORD,, -h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 -h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 -h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 -h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 -h11/__pycache__/__init__.cpython-312.pyc,, -h11/__pycache__/_abnf.cpython-312.pyc,, -h11/__pycache__/_connection.cpython-312.pyc,, -h11/__pycache__/_events.cpython-312.pyc,, -h11/__pycache__/_headers.cpython-312.pyc,, -h11/__pycache__/_readers.cpython-312.pyc,, -h11/__pycache__/_receivebuffer.cpython-312.pyc,, -h11/__pycache__/_state.cpython-312.pyc,, -h11/__pycache__/_util.cpython-312.pyc,, -h11/__pycache__/_version.cpython-312.pyc,, -h11/__pycache__/_writers.cpython-312.pyc,, -h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 -h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 -h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 -h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 -h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 -h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 -h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 -h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 -h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 -h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 -h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL deleted file mode 100644 index 1eb3c49d..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (78.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 8f080eae..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Nathaniel J. Smith and other contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt deleted file mode 100644 index 0d24def7..00000000 --- a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -h11 diff --git a/venv/lib/python3.12/site-packages/h11/__init__.py b/venv/lib/python3.12/site-packages/h11/__init__.py deleted file mode 100644 index 989e92c3..00000000 --- a/venv/lib/python3.12/site-packages/h11/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), -# containing no networking code at all, loosely modelled on hyper-h2's generic -# implementation of HTTP/2 (and in particular the h2.connection.H2Connection -# class). There's still a bunch of subtle details you need to get right if you -# want to make this actually useful, because it doesn't implement all the -# semantics to check that what you're asking to write to the wire is sensible, -# but at least it gets you out of dealing with the wire itself. - -from h11._connection import Connection, NEED_DATA, PAUSED -from h11._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from h11._state import ( - CLIENT, - CLOSED, - DONE, - ERROR, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError -from h11._version import __version__ - -PRODUCT_ID = "python-h11/" + __version__ - - -__all__ = ( - "Connection", - "NEED_DATA", - "PAUSED", - "ConnectionClosed", - "Data", - "EndOfMessage", - "Event", - "InformationalResponse", - "Request", - "Response", - "CLIENT", - "CLOSED", - "DONE", - "ERROR", - "IDLE", - "MUST_CLOSE", - "SEND_BODY", - "SEND_RESPONSE", - "SERVER", - "SWITCHED_PROTOCOL", - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", -) diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c4ff74bd..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc deleted file mode 100644 index ffd74cfa..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc deleted file mode 100644 index 58061e8f..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc deleted file mode 100644 index c31ede22..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc deleted file mode 100644 index 2678319e..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc deleted file mode 100644 index 1a3439eb..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc deleted file mode 100644 index b820d83c..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc deleted file mode 100644 index 031d90b6..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc deleted file mode 100644 index 7ff63d9d..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc deleted file mode 100644 index faacf1a7..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc deleted file mode 100644 index ccfae713..00000000 Binary files a/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/h11/_abnf.py b/venv/lib/python3.12/site-packages/h11/_abnf.py deleted file mode 100644 index 933587fb..00000000 --- a/venv/lib/python3.12/site-packages/h11/_abnf.py +++ /dev/null @@ -1,132 +0,0 @@ -# We use native strings for all the re patterns, to take advantage of string -# formatting, and then convert to bytestrings when compiling the final re -# objects. - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace -# OWS = *( SP / HTAB ) -# ; optional whitespace -OWS = r"[ \t]*" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators -# token = 1*tchar -# -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" -# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" -# / DIGIT / ALPHA -# ; any VCHAR, except delimiters -token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields -# field-name = token -field_name = token - -# The standard says: -# -# field-value = *( field-content / obs-fold ) -# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] -# field-vchar = VCHAR / obs-text -# obs-fold = CRLF 1*( SP / HTAB ) -# ; obsolete line folding -# ; see Section 3.2.4 -# -# https://tools.ietf.org/html/rfc5234#appendix-B.1 -# -# VCHAR = %x21-7E -# ; visible (printing) characters -# -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string -# obs-text = %x80-FF -# -# However, the standard definition of field-content is WRONG! It disallows -# fields containing a single visible character surrounded by whitespace, -# e.g. "foo a bar". -# -# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 -# -# So our definition of field_content attempts to fix it up... -# -# Also, we allow lots of control characters, because apparently people assume -# that they're legal in practice (e.g., google analytics makes cookies with -# \x01 in them!): -# https://github.com/python-hyper/h11/issues/57 -# We still don't allow NUL or whitespace, because those are often treated as -# meta-characters and letting them through can lead to nasty issues like SSRF. -vchar = r"[\x21-\x7e]" -vchar_or_obs_text = r"[^\x00\s]" -field_vchar = vchar_or_obs_text -field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) - -# We handle obs-fold at a different level, and our fixed-up field_content -# already grows to swallow the whole value, so ? instead of * -field_value = r"({field_content})?".format(**globals()) - -# header-field = field-name ":" OWS field-value OWS -header_field = ( - r"(?P{field_name})" - r":" - r"{OWS}" - r"(?P{field_value})" - r"{OWS}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line -# -# request-line = method SP request-target SP HTTP-version CRLF -# method = token -# HTTP-version = HTTP-name "/" DIGIT "." DIGIT -# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive -# -# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full -# URL, host+port (for connect), or even "*", but in any case we are guaranteed -# that it contists of the visible printing characters. -method = token -request_target = r"{vchar}+".format(**globals()) -http_version = r"HTTP/(?P[0-9]\.[0-9])" -request_line = ( - r"(?P{method})" - r" " - r"(?P{request_target})" - r" " - r"{http_version}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line -# -# status-line = HTTP-version SP status-code SP reason-phrase CRLF -# status-code = 3DIGIT -# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) -status_code = r"[0-9]{3}" -reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) -status_line = ( - r"{http_version}" - r" " - r"(?P{status_code})" - # However, there are apparently a few too many servers out there that just - # leave out the reason phrase: - # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 - # https://github.com/seanmonstar/httparse/issues/29 - # so make it optional. ?: is a non-capturing group. - r"(?: (?P{reason_phrase}))?".format(**globals()) -) - -HEXDIG = r"[0-9A-Fa-f]" -# Actually -# -# chunk-size = 1*HEXDIG -# -# but we impose an upper-limit to avoid ridiculosity. 
len(str(2**64)) == 20 -chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) -# Actually -# -# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) -# -# but we aren't parsing the things so we don't really care. -chunk_ext = r";.*" -chunk_header = ( - r"(?P<chunk_size>{chunk_size})" - r"(?P<chunk_ext>{chunk_ext})?" - r"{OWS}\r\n".format( - **globals() - ) # Even though the specification does not allow for extra whitespaces, - # we are lenient with trailing whitespaces because some servers in the wild use it. -) diff --git a/venv/lib/python3.12/site-packages/h11/_connection.py b/venv/lib/python3.12/site-packages/h11/_connection.py deleted file mode 100644 index e37d82a8..00000000 --- a/venv/lib/python3.12/site-packages/h11/_connection.py +++ /dev/null @@ -1,659 +0,0 @@ -# This contains the main Connection class. Everything in h11 revolves around -# this. -from typing import ( - Any, - Callable, - cast, - Dict, - List, - Optional, - overload, - Tuple, - Type, - Union, -) - -from ._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from ._headers import get_comma_header, has_expect_100_continue, set_comma_header -from ._readers import READERS, ReadersType -from ._receivebuffer import ReceiveBuffer -from ._state import ( - _SWITCH_CONNECT, - _SWITCH_UPGRADE, - CLIENT, - ConnectionState, - DONE, - ERROR, - MIGHT_SWITCH_PROTOCOL, - SEND_BODY, - SERVER, - SWITCHED_PROTOCOL, -) -from ._util import ( # Import the internal things we need - LocalProtocolError, - RemoteProtocolError, - Sentinel, -) -from ._writers import WRITERS, WritersType - -# Everything in __all__ gets re-exported as part of the h11 public API. -__all__ = ["Connection", "NEED_DATA", "PAUSED"] - - -class NEED_DATA(Sentinel, metaclass=Sentinel): - pass - - -class PAUSED(Sentinel, metaclass=Sentinel): - pass - - -# If we ever have this much buffered without it making a complete parseable -# event, we error out. The only time we really buffer is when reading the -# request/response line + headers together, so this is effectively the limit on -# the size of that. -# -# Some precedents for defaults: -# - node.js: 80 * 1024 -# - tomcat: 8 * 1024 -# - IIS: 16 * 1024 -# - Apache: <8 KiB per line> -DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 - - -# RFC 7230's rules for connection lifecycles: -# - If either side says they want to close the connection, then the connection -# must close. -# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close -# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive -# (and even this is a mess -- e.g. if you're implementing a proxy then -# sending Connection: keep-alive is forbidden). -# -# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So -# our rule is: -# - If someone says Connection: close, we will close -# - If someone uses HTTP/1.0, we will close. -def _keep_alive(event: Union[Request, Response]) -> bool: - connection = get_comma_header(event.headers, b"connection") - if b"close" in connection: - return False - if getattr(event, "http_version", b"1.1") < b"1.1": - return False - return True - - -def _body_framing( - request_method: bytes, event: Union[Request, Response] -) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: - # Called when we enter SEND_BODY to figure out framing information for - # this body.
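The `_keep_alive` helper above encodes the two rules just listed as two short-circuit checks. A minimal sketch of the resulting behaviour, importing the private helper purely for illustration (the header values are assumptions, not part of the original file):

from h11._connection import _keep_alive
from h11._events import Request

# HTTP/1.1 with no Connection header defaults to keep-alive:
assert _keep_alive(
    Request(method="GET", target="/", headers=[("Host", "example.org")])
)

# An explicit "Connection: close" wins:
assert not _keep_alive(
    Request(
        method="GET",
        target="/",
        headers=[("Host", "example.org"), ("Connection", "close")],
    )
)

# And any HTTP/1.0 peer is treated as close-after-response:
assert not _keep_alive(
    Request(
        method="GET",
        target="/",
        headers=[("Host", "example.org")],
        http_version="1.0",
    )
)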
- # - # These are the only two events that can trigger a SEND_BODY state: - assert type(event) in (Request, Response) - # Returns one of: - # - # ("content-length", count) - # ("chunked", ()) - # ("http/1.0", ()) - # - # which are (lookup key, *args) for constructing body reader/writer - # objects. - # - # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 - # - # Step 1: some responses always have an empty body, regardless of what the - # headers say. - if type(event) is Response: - if ( - event.status_code in (204, 304) - or request_method == b"HEAD" - or (request_method == b"CONNECT" and 200 <= event.status_code < 300) - ): - return ("content-length", (0,)) - # Section 3.3.3 also lists another case -- responses with status_code - # < 200. For us these are InformationalResponses, not Responses, so - # they can't get into this function in the first place. - assert event.status_code >= 200 - - # Step 2: check for Transfer-Encoding (T-E beats C-L): - transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") - if transfer_encodings: - assert transfer_encodings == [b"chunked"] - return ("chunked", ()) - - # Step 3: check for Content-Length - content_lengths = get_comma_header(event.headers, b"content-length") - if content_lengths: - return ("content-length", (int(content_lengths[0]),)) - - # Step 4: no applicable headers; fallback/default depends on type - if type(event) is Request: - return ("content-length", (0,)) - else: - return ("http/1.0", ()) - - -################################################################ -# -# The main Connection class -# -################################################################ - - -class Connection: - """An object encapsulating the state of an HTTP connection. - - Args: - our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If - you're implementing a server, pass :data:`h11.SERVER`. - - max_incomplete_event_size (int): - The maximum number of bytes we're willing to buffer of an - incomplete event. In practice this mostly sets a limit on the - maximum size of the request/response line + headers. If this is - exceeded, then :meth:`next_event` will raise - :exc:`RemoteProtocolError`. - - """ - - def __init__( - self, - our_role: Type[Sentinel], - max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, - ) -> None: - self._max_incomplete_event_size = max_incomplete_event_size - # State and role tracking - if our_role not in (CLIENT, SERVER): - raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}") - self.our_role = our_role - self.their_role: Type[Sentinel] - if our_role is CLIENT: - self.their_role = SERVER - else: - self.their_role = CLIENT - self._cstate = ConnectionState() - - # Callables for converting data->events or vice-versa given the - # current state - self._writer = self._get_io_object(self.our_role, None, WRITERS) - self._reader = self._get_io_object(self.their_role, None, READERS) - - # Holds any unprocessed received data - self._receive_buffer = ReceiveBuffer() - # If this is true, then it indicates that the incoming connection was - # closed *after* the end of whatever's in self._receive_buffer: - self._receive_buffer_closed = False - - # Extra bits of state that don't fit into the state machine. - # - # These two are only used to interpret framing headers for figuring - # out how to read/write response bodies. their_http_version is also - # made available as a convenient public API. 
- self.their_http_version: Optional[bytes] = None - self._request_method: Optional[bytes] = None - # This is pure flow-control and doesn't at all affect the set of legal - # transitions, so no need to bother ConnectionState with it: - self.client_is_waiting_for_100_continue = False - - @property - def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: - """A dictionary like:: - - {CLIENT: <client state>, SERVER: <server state>} - - See :ref:`state-machine` for details. - - """ - return dict(self._cstate.states) - - @property - def our_state(self) -> Type[Sentinel]: - """The current state of whichever role we are playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.our_role] - - @property - def their_state(self) -> Type[Sentinel]: - """The current state of whichever role we are NOT playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.their_role] - - @property - def they_are_waiting_for_100_continue(self) -> bool: - return self.their_role is CLIENT and self.client_is_waiting_for_100_continue - - def start_next_cycle(self) -> None: - """Attempt to reset our connection state for a new request/response - cycle. - - If both client and server are in :data:`DONE` state, then resets them - both to :data:`IDLE` state in preparation for a new request/response - cycle on this same connection. Otherwise, raises a - :exc:`LocalProtocolError`. - - See :ref:`keepalive-and-pipelining`. - - """ - old_states = dict(self._cstate.states) - self._cstate.start_next_cycle() - self._request_method = None - # self.their_http_version gets left alone, since it presumably lasts - # beyond a single request/response cycle - assert not self.client_is_waiting_for_100_continue - self._respond_to_state_changes(old_states) - - def _process_error(self, role: Type[Sentinel]) -> None: - old_states = dict(self._cstate.states) - self._cstate.process_error(role) - self._respond_to_state_changes(old_states) - - def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: - if type(event) is InformationalResponse and event.status_code == 101: - return _SWITCH_UPGRADE - if type(event) is Response: - if ( - _SWITCH_CONNECT in self._cstate.pending_switch_proposals - and 200 <= event.status_code < 300 - ): - return _SWITCH_CONNECT - return None - - # All events go through here - def _process_event(self, role: Type[Sentinel], event: Event) -> None: - # First, pass the event through the state machine to make sure it - # succeeds. - old_states = dict(self._cstate.states) - if role is CLIENT and type(event) is Request: - if event.method == b"CONNECT": - self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) - if get_comma_header(event.headers, b"upgrade"): - self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) - server_switch_event = None - if role is SERVER: - server_switch_event = self._server_switch_event(event) - self._cstate.process_event(role, type(event), server_switch_event) - - # Then perform the updates triggered by it. - - if type(event) is Request: - self._request_method = event.method - - if role is self.their_role and type(event) in ( - Request, - Response, - InformationalResponse, - ): - event = cast(Union[Request, Response, InformationalResponse], event) - self.their_http_version = event.http_version - - # Keep alive handling - # - # RFC 7230 doesn't really say what one should do if Connection: close - # shows up on a 1xx InformationalResponse. I think the idea is that - # this is not supposed to happen.
In any case, if it does happen, we - # ignore it. - if type(event) in (Request, Response) and not _keep_alive( - cast(Union[Request, Response], event) - ): - self._cstate.process_keep_alive_disabled() - - # 100-continue - if type(event) is Request and has_expect_100_continue(event): - self.client_is_waiting_for_100_continue = True - if type(event) in (InformationalResponse, Response): - self.client_is_waiting_for_100_continue = False - if role is CLIENT and type(event) in (Data, EndOfMessage): - self.client_is_waiting_for_100_continue = False - - self._respond_to_state_changes(old_states, event) - - def _get_io_object( - self, - role: Type[Sentinel], - event: Optional[Event], - io_dict: Union[ReadersType, WritersType], - ) -> Optional[Callable[..., Any]]: - # event may be None; it's only used when entering SEND_BODY - state = self._cstate.states[role] - if state is SEND_BODY: - # Special case: the io_dict has a dict of reader/writer factories - # that depend on the request/response framing. - framing_type, args = _body_framing( - cast(bytes, self._request_method), cast(Union[Request, Response], event) - ) - return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] - else: - # General case: the io_dict just has the appropriate reader/writer - # for this state - return io_dict.get((role, state)) # type: ignore[return-value] - - # This must be called after any action that might have caused - # self._cstate.states to change. - def _respond_to_state_changes( - self, - old_states: Dict[Type[Sentinel], Type[Sentinel]], - event: Optional[Event] = None, - ) -> None: - # Update reader/writer - if self.our_state != old_states[self.our_role]: - self._writer = self._get_io_object(self.our_role, event, WRITERS) - if self.their_state != old_states[self.their_role]: - self._reader = self._get_io_object(self.their_role, event, READERS) - - @property - def trailing_data(self) -> Tuple[bytes, bool]: - """Data that has been received, but not yet processed, represented as - a tuple with two elements, where the first is a byte-string containing - the unprocessed data itself, and the second is a bool that is True if - the receive connection was closed. - - See :ref:`switching-protocols` for discussion of why you'd want this. - """ - return (bytes(self._receive_buffer), self._receive_buffer_closed) - - def receive_data(self, data: bytes) -> None: - """Add data to our internal receive buffer. - - This does not actually do any processing on the data, just stores - it. To trigger processing, you have to call :meth:`next_event`. - - Args: - data (:term:`bytes-like object`): - The new data that was just received. - - Special case: If *data* is an empty byte-string like ``b""``, - then this indicates that the remote side has closed the - connection (end of file). Normally this is convenient, because - standard Python APIs like :meth:`file.read` or - :meth:`socket.recv` use ``b""`` to indicate end-of-file, while - other failures to read are indicated using other mechanisms - like raising :exc:`TimeoutError`. When using such an API you - can just blindly pass through whatever you get from ``read`` - to :meth:`receive_data`, and everything will work. - - But, if you have an API where reading an empty string is a - valid non-EOF condition, then you need to be aware of this and - make sure to check for such strings and avoid passing them to - :meth:`receive_data`. - - Returns: - Nothing, but after calling this you should call :meth:`next_event` - to parse the newly received data. 
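That split between buffering (`receive_data`) and parsing (`next_event`) gives the canonical h11 read loop. A minimal sketch, assuming a blocking, already-connected socket (the function name and buffer size are illustrative, not from the original file):

import socket

import h11

def next_event_blocking(conn: h11.Connection, sock: socket.socket):
    # Keep feeding bytes until h11 can parse out a complete event.
    # Note this may also return h11.PAUSED, per the docs below.
    while True:
        event = conn.next_event()
        if event is h11.NEED_DATA:
            # recv() returning b"" means EOF, which receive_data()
            # accepts as "the remote side closed the connection".
            conn.receive_data(sock.recv(4096))
            continue
        return event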
- - Raises: - RuntimeError: - Raised if you pass an empty *data*, indicating EOF, and then - pass a non-empty *data*, indicating more data that somehow - arrived after the EOF. - - (Calling ``receive_data(b"")`` multiple times is fine, - and equivalent to calling it once.) - - """ - if data: - if self._receive_buffer_closed: - raise RuntimeError("received close, then received more data?") - self._receive_buffer += data - else: - self._receive_buffer_closed = True - - def _extract_next_receive_event( - self, - ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - state = self.their_state - # We don't pause immediately when they enter DONE, because even in - # DONE state we can still process a ConnectionClosed() event. But - # if we have data in our buffer, then we definitely aren't getting - # a ConnectionClosed() immediately and we need to pause. - if state is DONE and self._receive_buffer: - return PAUSED - if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: - return PAUSED - assert self._reader is not None - event = self._reader(self._receive_buffer) - if event is None: - if not self._receive_buffer and self._receive_buffer_closed: - # In some unusual cases (basically just HTTP/1.0 bodies), EOF - # triggers an actual protocol event; in that case, we want to - # return that event, and then the state will change and we'll - # get called again to generate the actual ConnectionClosed(). - if hasattr(self._reader, "read_eof"): - event = self._reader.read_eof() - else: - event = ConnectionClosed() - if event is None: - event = NEED_DATA - return event # type: ignore[no-any-return] - - def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - """Parse the next event out of our receive buffer, update our internal - state, and return it. - - This is a mutating operation -- think of it like calling :func:`next` - on an iterator. - - Returns: - : One of three things: - - 1) An event object -- see :ref:`events`. - - 2) The special constant :data:`NEED_DATA`, which indicates that - you need to read more data from your socket and pass it to - :meth:`receive_data` before this method will be able to return - any more events. - - 3) The special constant :data:`PAUSED`, which indicates that we - are not in a state where we can process incoming data (usually - because the peer has finished their part of the current - request/response cycle, and you have not yet called - :meth:`start_next_cycle`). See :ref:`flow-control` for details. - - Raises: - RemoteProtocolError: - The peer has misbehaved. You should close the connection - (possibly after sending some kind of 4xx response). - - Once this method returns :class:`ConnectionClosed` once, then all - subsequent calls will also return :class:`ConnectionClosed`. - - If this method raises any exception besides :exc:`RemoteProtocolError` - then that's a bug -- if it happens please file a bug report! - - If this method raises any exception then it also sets - :attr:`Connection.their_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. 
- - """ - - if self.their_state is ERROR: - raise RemoteProtocolError("Can't receive data when peer state is ERROR") - try: - event = self._extract_next_receive_event() - if event not in [NEED_DATA, PAUSED]: - self._process_event(self.their_role, cast(Event, event)) - if event is NEED_DATA: - if len(self._receive_buffer) > self._max_incomplete_event_size: - # 431 is "Request header fields too large" which is pretty - # much the only situation where we can get here - raise RemoteProtocolError( - "Receive buffer too long", error_status_hint=431 - ) - if self._receive_buffer_closed: - # We're still trying to complete some event, but that's - # never going to happen because no more data is coming - raise RemoteProtocolError("peer unexpectedly closed connection") - return event - except BaseException as exc: - self._process_error(self.their_role) - if isinstance(exc, LocalProtocolError): - exc._reraise_as_remote_protocol_error() - else: - raise - - @overload - def send(self, event: ConnectionClosed) -> None: - ... - - @overload - def send( - self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] - ) -> bytes: - ... - - @overload - def send(self, event: Event) -> Optional[bytes]: - ... - - def send(self, event: Event) -> Optional[bytes]: - """Convert a high-level event into bytes that can be sent to the peer, - while updating our internal state machine. - - Args: - event: The :ref:`event ` to send. - - Returns: - If ``type(event) is ConnectionClosed``, then returns - ``None``. Otherwise, returns a :term:`bytes-like object`. - - Raises: - LocalProtocolError: - Sending this event at this time would violate our - understanding of the HTTP/1.1 protocol. - - If this method raises any exception then it also sets - :attr:`Connection.our_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. - - """ - data_list = self.send_with_data_passthrough(event) - if data_list is None: - return None - else: - return b"".join(data_list) - - def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: - """Identical to :meth:`send`, except that in situations where - :meth:`send` returns a single :term:`bytes-like object`, this instead - returns a list of them -- and when sending a :class:`Data` event, this - list is guaranteed to contain the exact object you passed in as - :attr:`Data.data`. See :ref:`sendfile` for discussion. - - """ - if self.our_state is ERROR: - raise LocalProtocolError("Can't send data when our state is ERROR") - try: - if type(event) is Response: - event = self._clean_up_response_headers_for_sending(event) - # We want to call _process_event before calling the writer, - # because if someone tries to do something invalid then this will - # give a sensible error message, while our writers all just assume - # they will only receive valid events. But, _process_event might - # change self._writer. So we have to do a little dance: - writer = self._writer - self._process_event(self.our_role, event) - if type(event) is ConnectionClosed: - return None - else: - # In any situation where writer is None, process_event should - # have raised ProtocolError - assert writer is not None - data_list: List[bytes] = [] - writer(event, data_list.append) - return data_list - except: - self._process_error(self.our_role) - raise - - def send_failed(self) -> None: - """Notify the state machine that we failed to send the data it gave - us. 
- - This causes :attr:`Connection.our_state` to immediately become - :data:`ERROR` -- see :ref:`error-handling` for discussion. - - """ - self._process_error(self.our_role) - - # When sending a Response, we take responsibility for a few things: - # - # - Sometimes you MUST set Connection: close. We take care of those - # times. (You can also set it yourself if you want, and if you do then - # we'll respect that and close the connection at the right time. But you - # don't have to worry about that unless you want to.) - # - # - The user has to set Content-Length if they want it. Otherwise, for - # responses that have bodies (e.g. not HEAD), then we will automatically - # select the right mechanism for streaming a body of unknown length, - # which depends on the peer's HTTP version. - # - # This function's *only* responsibility is making sure headers are set up - # right -- everything downstream just looks at the headers. There are no - # side channels. - def _clean_up_response_headers_for_sending(self, response: Response) -> Response: - assert type(response) is Response - - headers = response.headers - need_close = False - - # HEAD requests need some special handling: they always act like they - # have Content-Length: 0, and that's how _body_framing treats - # them. But their headers are supposed to match what we would send if - # the request was a GET. (Technically there is one deviation allowed: - # we're allowed to leave out the framing headers -- see - # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as - # easy to get them right.) - method_for_choosing_headers = cast(bytes, self._request_method) - if method_for_choosing_headers == b"HEAD": - method_for_choosing_headers = b"GET" - framing_type, _ = _body_framing(method_for_choosing_headers, response) - if framing_type in ("chunked", "http/1.0"): - # This response has a body of unknown length. - # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked - # If our peer is HTTP/1.0, we use no framing headers, and close the - # connection afterwards. - # - # Make sure to clear Content-Length (in principle user could have - # set both and then we ignored Content-Length b/c - # Transfer-Encoding overwrote it -- this would be naughty of them, - # but the HTTP spec says that if our peer does this then we have - # to fix it instead of erroring out, so we'll accord the user the - # same respect). - headers = set_comma_header(headers, b"content-length", []) - if self.their_http_version is None or self.their_http_version < b"1.1": - # Either we never got a valid request and are sending back an - # error (their_http_version is None), so we assume the worst; - # or else we did get a valid HTTP/1.0 request, so we know that - # they don't understand chunked encoding. - headers = set_comma_header(headers, b"transfer-encoding", []) - # This is actually redundant ATM, since currently we - # unconditionally disable keep-alive when talking to HTTP/1.0 - # peers.
But let's be defensive just in case we add - # Connection: keep-alive support later: - if self._request_method != b"HEAD": - need_close = True - else: - headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) - - if not self._cstate.keep_alive or need_close: - # Make sure Connection: close is set - connection = set(get_comma_header(headers, b"connection")) - connection.discard(b"keep-alive") - connection.add(b"close") - headers = set_comma_header(headers, b"connection", sorted(connection)) - - return Response( - headers=headers, - status_code=response.status_code, - http_version=response.http_version, - reason=response.reason, - ) diff --git a/venv/lib/python3.12/site-packages/h11/_events.py b/venv/lib/python3.12/site-packages/h11/_events.py deleted file mode 100644 index ca1c3adb..00000000 --- a/venv/lib/python3.12/site-packages/h11/_events.py +++ /dev/null @@ -1,369 +0,0 @@ -# High level events that make up HTTP/1.1 conversations. Loosely inspired by -# the corresponding events in hyper-h2: -# -# http://python-hyper.org/h2/en/stable/api.html#events -# -# Don't subclass these. Stuff will break. - -import re -from abc import ABC -from dataclasses import dataclass -from typing import List, Tuple, Union - -from ._abnf import method, request_target -from ._headers import Headers, normalize_and_validate -from ._util import bytesify, LocalProtocolError, validate - -# Everything in __all__ gets re-exported as part of the h11 public API. -__all__ = [ - "Event", - "Request", - "InformationalResponse", - "Response", - "Data", - "EndOfMessage", - "ConnectionClosed", -] - -method_re = re.compile(method.encode("ascii")) -request_target_re = re.compile(request_target.encode("ascii")) - - -class Event(ABC): - """ - Base class for h11 events. - """ - - __slots__ = () - - -@dataclass(init=False, frozen=True) -class Request(Event): - """The beginning of an HTTP request. - - Fields: - - .. attribute:: method - - An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte - string. :term:`Bytes-like objects <bytes-like object>` and native - strings containing only ascii characters will be automatically - converted to byte strings. - - .. attribute:: target - - The target of an HTTP request, e.g. ``b"/index.html"``, or one of the - more exotic formats described in `RFC 7230, section 5.3 - <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte - string. :term:`Bytes-like objects <bytes-like object>` and native - strings containing only ascii characters will be automatically - converted to byte strings. - - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. See - :ref:`the header normalization rules <headers-format>` for details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - <http_version-format>` for details.
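A short sketch of those normalization rules in action (the concrete values are illustrative assumptions):

import h11

req = h11.Request(
    method="GET",  # a native str is converted to b"GET"
    target="/index.html",
    headers=[("Host", "example.org"), ("Accept", "*/*")],
)
assert req.method == b"GET"
assert req.http_version == b"1.1"  # the default
assert list(req.headers)[0] == (b"host", b"example.org")  # names lowercased

# An HTTP/1.1 request without a Host header is rejected immediately:
try:
    h11.Request(method="GET", target="/", headers=[])
except h11.LocalProtocolError:
    pass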
- - """ - - __slots__ = ("method", "headers", "target", "http_version") - - method: bytes - headers: Headers - target: bytes - http_version: bytes - - def __init__( - self, - *, - method: Union[bytes, str], - headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], - target: Union[bytes, str], - http_version: Union[bytes, str] = b"1.1", - _parsed: bool = False, - ) -> None: - super().__init__() - if isinstance(headers, Headers): - object.__setattr__(self, "headers", headers) - else: - object.__setattr__( - self, "headers", normalize_and_validate(headers, _parsed=_parsed) - ) - if not _parsed: - object.__setattr__(self, "method", bytesify(method)) - object.__setattr__(self, "target", bytesify(target)) - object.__setattr__(self, "http_version", bytesify(http_version)) - else: - object.__setattr__(self, "method", method) - object.__setattr__(self, "target", target) - object.__setattr__(self, "http_version", http_version) - - # "A server MUST respond with a 400 (Bad Request) status code to any - # HTTP/1.1 request message that lacks a Host header field and to any - # request message that contains more than one Host header field or a - # Host header field with an invalid field-value." - # -- https://tools.ietf.org/html/rfc7230#section-5.4 - host_count = 0 - for name, value in self.headers: - if name == b"host": - host_count += 1 - if self.http_version == b"1.1" and host_count == 0: - raise LocalProtocolError("Missing mandatory Host: header") - if host_count > 1: - raise LocalProtocolError("Found multiple Host: headers") - - validate(method_re, self.method, "Illegal method characters") - validate(request_target_re, self.target, "Illegal target characters") - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class _ResponseBase(Event): - __slots__ = ("headers", "http_version", "reason", "status_code") - - headers: Headers - http_version: bytes - reason: bytes - status_code: int - - def __init__( - self, - *, - headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], - status_code: int, - http_version: Union[bytes, str] = b"1.1", - reason: Union[bytes, str] = b"", - _parsed: bool = False, - ) -> None: - super().__init__() - if isinstance(headers, Headers): - object.__setattr__(self, "headers", headers) - else: - object.__setattr__( - self, "headers", normalize_and_validate(headers, _parsed=_parsed) - ) - if not _parsed: - object.__setattr__(self, "reason", bytesify(reason)) - object.__setattr__(self, "http_version", bytesify(http_version)) - if not isinstance(status_code, int): - raise LocalProtocolError("status code must be integer") - # Because IntEnum objects are instances of int, but aren't - # duck-compatible (sigh), see gh-72. - object.__setattr__(self, "status_code", int(status_code)) - else: - object.__setattr__(self, "reason", reason) - object.__setattr__(self, "http_version", http_version) - object.__setattr__(self, "status_code", status_code) - - self.__post_init__() - - def __post_init__(self) -> None: - pass - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class InformationalResponse(_ResponseBase): - """An HTTP informational response. - - Fields: - - .. attribute:: status_code - - The status code of this response, as an integer. For an - :class:`InformationalResponse`, this is always in the range [100, - 200). - - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. 
See - :ref:`the header normalization rules <headers-format>` for - details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - <http_version-format>` for details. - - .. attribute:: reason - - The reason phrase of this response, as a byte string. For example: - ``b"OK"``, or ``b"Not Found"``. - - """ - - def __post_init__(self) -> None: - if not (100 <= self.status_code < 200): - raise LocalProtocolError( - "InformationalResponse status_code should be in range " - "[100, 200), not {}".format(self.status_code) - ) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class Response(_ResponseBase): - """The beginning of an HTTP response. - - Fields: - - .. attribute:: status_code - - The status code of this response, as an integer. For an - :class:`Response`, this is always in the range [200, - 1000). - - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. See - :ref:`the header normalization rules <headers-format>` for details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - <http_version-format>` for details. - - .. attribute:: reason - - The reason phrase of this response, as a byte string. For example: - ``b"OK"``, or ``b"Not Found"``. - - """ - - def __post_init__(self) -> None: - if not (200 <= self.status_code < 1000): - raise LocalProtocolError( - "Response status_code should be in range [200, 1000), not {}".format( - self.status_code - ) - ) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class Data(Event): - """Part of an HTTP message body. - - Fields: - - .. attribute:: data - - A :term:`bytes-like object` containing part of a message body. Or, if - using the ``combine=False`` argument to :meth:`Connection.send`, then - any object that your socket writing code knows what to do with, and for - which calling :func:`len` returns the number of bytes that will be - written -- see :ref:`sendfile` for details. - - .. attribute:: chunk_start - - A marker that indicates whether this data object is from the start of a - chunked transfer encoding chunk. This field is ignored when a Data - event is provided to :meth:`Connection.send`: it is only valid on - events emitted from :meth:`Connection.next_event`. You probably - shouldn't use this attribute at all; see - :ref:`chunk-delimiters-are-bad` for details. - - .. attribute:: chunk_end - - A marker that indicates whether this data object is the last for a - given chunked transfer encoding chunk. This field is ignored when - a Data event is provided to :meth:`Connection.send`: it is only valid - on events emitted from :meth:`Connection.next_event`. You probably - shouldn't use this attribute at all; see - :ref:`chunk-delimiters-are-bad` for details. - - """ - - __slots__ = ("data", "chunk_start", "chunk_end") - - data: bytes - chunk_start: bool - chunk_end: bool - - def __init__( - self, data: bytes, chunk_start: bool = False, chunk_end: bool = False - ) -> None: - object.__setattr__(self, "data", data) - object.__setattr__(self, "chunk_start", chunk_start) - object.__setattr__(self, "chunk_end", chunk_end) - - # This is an unhashable type.
- __hash__ = None # type: ignore - - -# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that -# are forbidden to be sent in a trailer, since processing them as if they were -# present in the header section might bypass external security filters." -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part -# Unfortunately, the list of forbidden fields is long and vague :-/ -@dataclass(init=False, frozen=True) -class EndOfMessage(Event): - """The end of an HTTP message. - - Fields: - - .. attribute:: headers - - Default value: ``[]`` - - Any trailing headers attached to this message, represented as a list of - (name, value) pairs. See :ref:`the header normalization rules - <headers-format>` for details. - - Must be empty unless ``Transfer-Encoding: chunked`` is in use. - - """ - - __slots__ = ("headers",) - - headers: Headers - - def __init__( - self, - *, - headers: Union[ - Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None - ] = None, - _parsed: bool = False, - ) -> None: - super().__init__() - if headers is None: - headers = Headers([]) - elif not isinstance(headers, Headers): - headers = normalize_and_validate(headers, _parsed=_parsed) - - object.__setattr__(self, "headers", headers) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(frozen=True) -class ConnectionClosed(Event): - """This event indicates that the sender has closed their outgoing - connection. - - Note that this does not necessarily mean that they can't *receive* further - data, because TCP connections are composed of two one-way channels which - can be closed independently. See :ref:`closing` for details. - - No fields. - """ - - pass diff --git a/venv/lib/python3.12/site-packages/h11/_headers.py b/venv/lib/python3.12/site-packages/h11/_headers.py deleted file mode 100644 index 31da3e2b..00000000 --- a/venv/lib/python3.12/site-packages/h11/_headers.py +++ /dev/null @@ -1,282 +0,0 @@ -import re -from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union - -from ._abnf import field_name, field_value -from ._util import bytesify, LocalProtocolError, validate - -if TYPE_CHECKING: - from ._events import Request - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - -CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 - - -# Facts -# ----- -# -# Headers are: -# keys: case-insensitive ascii -# values: mixture of ascii and raw bytes -# -# "Historically, HTTP has allowed field content with text in the ISO-8859-1 -# charset [ISO-8859-1], supporting other charsets only through use of -# [RFC2047] encoding. In practice, most HTTP header field values use only a -# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD -# limit their field values to US-ASCII octets. A recipient SHOULD treat other -# octets in field content (obs-text) as opaque data."
-# And it deprecates all non-ascii values -# -# Leading/trailing whitespace in header names is forbidden -# -# Values get leading/trailing whitespace stripped -# -# Content-Disposition actually needs to contain unicode semantically; to -# accomplish this it has a terrifically weird way of encoding the filename -# itself as ascii (and even this still has lots of cross-browser -# incompatibilities) -# -# Order is important: -# "a proxy MUST NOT change the order of these field values when forwarding a -# message" -# (and there are several headers where the order indicates a preference) -# -# Multiple occurrences of the same header: -# "A sender MUST NOT generate multiple header fields with the same field name -# in a message unless either the entire field value for that header field is -# defined as a comma-separated list [or the header is Set-Cookie which gets a -# special exception]" - RFC 7230. (cookies are in RFC 6265) -# -# So every header aside from Set-Cookie can be merged by b", ".join if it -# occurs repeatedly. But, of course, they can't necessarily be split by -# .split(b","), because quoting. -# -# Given all this mess (case insensitive, duplicates allowed, order is -# important, ...), there doesn't appear to be any standard way to handle -# headers in Python -- they're almost like dicts, but... actually just -# aren't. For now we punt and just use a super simple representation: headers -# are a list of pairs -# -# [(name1, value1), (name2, value2), ...] -# -# where all entries are bytestrings, names are lowercase and have no -# leading/trailing whitespace, and values are bytestrings with no -# leading/trailing whitespace. Searching and updating are done via naive O(n) -# methods. -# -# Maybe a dict-of-lists would be better? - -_content_length_re = re.compile(rb"[0-9]+") -_field_name_re = re.compile(field_name.encode("ascii")) -_field_value_re = re.compile(field_value.encode("ascii")) - - -class Headers(Sequence[Tuple[bytes, bytes]]): - """ - A list-like interface that allows iterating over headers as byte-pairs - of (lowercased-name, value). - - Internally we actually store the representation as three-tuples, - including both the raw original casing, in order to preserve casing - over-the-wire, and the lowercased name, for case-insensitive comparisons.
- - r = Request( - method="GET", - target="/", - headers=[("Host", "example.org"), ("Connection", "keep-alive")], - http_version="1.1", - ) - assert r.headers == [ - (b"host", b"example.org"), - (b"connection", b"keep-alive") - ] - assert r.headers.raw_items() == [ - (b"Host", b"example.org"), - (b"Connection", b"keep-alive") - ] - """ - - __slots__ = "_full_items" - - def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: - self._full_items = full_items - - def __bool__(self) -> bool: - return bool(self._full_items) - - def __eq__(self, other: object) -> bool: - return list(self) == list(other) # type: ignore - - def __len__(self) -> int: - return len(self._full_items) - - def __repr__(self) -> str: - return "<Headers(%s)>" % repr(list(self)) - - def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] - _, name, value = self._full_items[idx] - return (name, value) - - def raw_items(self) -> List[Tuple[bytes, bytes]]: - return [(raw_name, value) for raw_name, _, value in self._full_items] - - -HeaderTypes = Union[ - List[Tuple[bytes, bytes]], - List[Tuple[bytes, str]], - List[Tuple[str, bytes]], - List[Tuple[str, str]], -] - - -@overload -def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: - ... - - -@overload -def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: - ... - - -@overload -def normalize_and_validate( - headers: Union[Headers, HeaderTypes], _parsed: bool = False -) -> Headers: - ... - - -def normalize_and_validate( - headers: Union[Headers, HeaderTypes], _parsed: bool = False -) -> Headers: - new_headers = [] - seen_content_length = None - saw_transfer_encoding = False - for name, value in headers: - # For headers coming out of the parser, we can safely skip some steps, - # because it always returns bytes and has already run these regexes - # over the data: - if not _parsed: - name = bytesify(name) - value = bytesify(value) - validate(_field_name_re, name, "Illegal header name {!r}", name) - validate(_field_value_re, value, "Illegal header value {!r}", value) - assert isinstance(name, bytes) - assert isinstance(value, bytes) - - raw_name = name - name = name.lower() - if name == b"content-length": - lengths = {length.strip() for length in value.split(b",")} - if len(lengths) != 1: - raise LocalProtocolError("conflicting Content-Length headers") - value = lengths.pop() - validate(_content_length_re, value, "bad Content-Length") - if len(value) > CONTENT_LENGTH_MAX_DIGITS: - raise LocalProtocolError("bad Content-Length") - if seen_content_length is None: - seen_content_length = value - new_headers.append((raw_name, name, value)) - elif seen_content_length != value: - raise LocalProtocolError("conflicting Content-Length headers") - elif name == b"transfer-encoding": - # "A server that receives a request message with a transfer coding - # it does not understand SHOULD respond with 501 (Not - # Implemented)."
- # https://tools.ietf.org/html/rfc7230#section-3.3.1 - if saw_transfer_encoding: - raise LocalProtocolError( - "multiple Transfer-Encoding headers", error_status_hint=501 - ) - # "All transfer-coding names are case-insensitive" - # -- https://tools.ietf.org/html/rfc7230#section-4 - value = value.lower() - if value != b"chunked": - raise LocalProtocolError( - "Only Transfer-Encoding: chunked is supported", - error_status_hint=501, - ) - saw_transfer_encoding = True - new_headers.append((raw_name, name, value)) - else: - new_headers.append((raw_name, name, value)) - return Headers(new_headers) - - -def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: - # Should only be used for headers whose value is a list of - # comma-separated, case-insensitive values. - # - # The header name `name` is expected to be lower-case bytes. - # - # Connection: meets these criteria (including case insensitivity). - # - # Content-Length: technically is just a single value (1*DIGIT), but the - # standard makes reference to implementations that do multiple values, and - # using this doesn't hurt. Ditto, case insensitivity doesn't hurt things - # either way. - # - # Transfer-Encoding: is more complex (allows for quoted strings), so - # splitting on , is actually wrong. For example, this is legal: - # - # Transfer-Encoding: foo; options="1,2", chunked - # - # and should be parsed as - # - # foo; options="1,2" - # chunked - # - # but this naive function will parse it as - # - # foo; options="1 - # 2" - # chunked - # - # However, this is okay because the only thing we are going to do with - # any Transfer-Encoding is reject ones that aren't just "chunked", so - # both of these will be treated the same anyway. - # - # Expect: the only legal value is the literal string - # "100-continue". Splitting on commas is harmless. Case insensitive. - # - out: List[bytes] = [] - for _, found_name, found_raw_value in headers._full_items: - if found_name == name: - found_raw_value = found_raw_value.lower() - for found_split_value in found_raw_value.split(b","): - found_split_value = found_split_value.strip() - if found_split_value: - out.append(found_split_value) - return out - - -def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: - # The header name `name` is expected to be lower-case bytes. - # - # Note that when we store the header we use title casing for the header - # names, in order to match the conventional HTTP header style. - # - # Simply calling `.title()` is a blunt approach, but it's correct - # here given the cases where we're using `set_comma_header`... - # - # Connection, Content-Length, Transfer-Encoding. - new_headers: List[Tuple[bytes, bytes]] = [] - for found_raw_name, found_name, found_raw_value in headers._full_items: - if found_name != name: - new_headers.append((found_raw_name, found_raw_value)) - for new_value in new_values: - new_headers.append((name.title(), new_value)) - return normalize_and_validate(new_headers) - - -def has_expect_100_continue(request: "Request") -> bool: - # https://tools.ietf.org/html/rfc7231#section-5.1.1 - # "A server that receives a 100-continue expectation in an HTTP/1.0 request - # MUST ignore that expectation."
- if request.http_version < b"1.1": - return False - expect = get_comma_header(request.headers, b"expect") - return b"100-continue" in expect diff --git a/venv/lib/python3.12/site-packages/h11/_readers.py b/venv/lib/python3.12/site-packages/h11/_readers.py deleted file mode 100644 index 576804cc..00000000 --- a/venv/lib/python3.12/site-packages/h11/_readers.py +++ /dev/null @@ -1,250 +0,0 @@ -# Code to read HTTP data -# -# Strategy: each reader is a callable which takes a ReceiveBuffer object, and -# either: -# 1) consumes some of it and returns an Event -# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() -# and it might raise a LocalProtocolError, so simpler just to always use -# this) -# 3) returns None, meaning "I need more data" -# -# If they have a .read_eof attribute, then this will be called if an EOF is -# received -- but this is optional. Either way, the actual ConnectionClosed -# event will be generated afterwards. -# -# READERS is a dict describing how to pick a reader. It maps states to either: -# - a reader -# - or, for body readers, a dict of per-framing reader factories - -import re -from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union - -from ._abnf import chunk_header, header_field, request_line, status_line -from ._events import Data, EndOfMessage, InformationalResponse, Request, Response -from ._receivebuffer import ReceiveBuffer -from ._state import ( - CLIENT, - CLOSED, - DONE, - IDLE, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, -) -from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate - -__all__ = ["READERS"] - -header_field_re = re.compile(header_field.encode("ascii")) -obs_fold_re = re.compile(rb"[ \t]+") - - -def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: - it = iter(lines) - last: Optional[bytes] = None - for line in it: - match = obs_fold_re.match(line) - if match: - if last is None: - raise LocalProtocolError("continuation line at start of headers") - if not isinstance(last, bytearray): - # Cast to a mutable type, avoiding copy on append to ensure O(n) time - last = bytearray(last) - last += b" " - last += line[match.end() :] - else: - if last is not None: - yield last - last = line - if last is not None: - yield last - - -def _decode_header_lines( - lines: Iterable[bytes], -) -> Iterable[Tuple[bytes, bytes]]: - for line in _obsolete_line_fold(lines): - matches = validate(header_field_re, line, "illegal header line: {!r}", line) - yield (matches["field_name"], matches["field_value"]) - - -request_line_re = re.compile(request_line.encode("ascii")) - - -def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise LocalProtocolError("no request line received") - matches = validate( - request_line_re, lines[0], "illegal request line: {!r}", lines[0] - ) - return Request( - headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches - ) - - -status_line_re = re.compile(status_line.encode("ascii")) - - -def maybe_read_from_SEND_RESPONSE_server( - buf: ReceiveBuffer, -) -> Union[InformationalResponse, Response, None]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise 
LocalProtocolError("no response line received") - matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) - http_version = ( - b"1.1" if matches["http_version"] is None else matches["http_version"] - ) - reason = b"" if matches["reason"] is None else matches["reason"] - status_code = int(matches["status_code"]) - class_: Union[Type[InformationalResponse], Type[Response]] = ( - InformationalResponse if status_code < 200 else Response - ) - return class_( - headers=list(_decode_header_lines(lines[1:])), - _parsed=True, - status_code=status_code, - reason=reason, - http_version=http_version, - ) - - -class ContentLengthReader: - def __init__(self, length: int) -> None: - self._length = length - self._remaining = length - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._remaining == 0: - return EndOfMessage() - data = buf.maybe_extract_at_most(self._remaining) - if data is None: - return None - self._remaining -= len(data) - return Data(data=data) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(received {} bytes, expected {})".format( - self._length - self._remaining, self._length - ) - ) - - -chunk_header_re = re.compile(chunk_header.encode("ascii")) - - -class ChunkedReader: - def __init__(self) -> None: - self._bytes_in_chunk = 0 - # After reading a chunk, we have to throw away the trailing \r\n. - # This tracks the bytes that we need to match and throw away. - self._bytes_to_discard = b"" - self._reading_trailer = False - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._reading_trailer: - lines = buf.maybe_extract_lines() - if lines is None: - return None - return EndOfMessage(headers=list(_decode_header_lines(lines))) - if self._bytes_to_discard: - data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) - if data is None: - return None - if data != self._bytes_to_discard[: len(data)]: - raise LocalProtocolError( - f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" - ) - self._bytes_to_discard = self._bytes_to_discard[len(data) :] - if self._bytes_to_discard: - return None - # else, fall through and read some more - assert self._bytes_to_discard == b"" - if self._bytes_in_chunk == 0: - # We need to refill our chunk count - chunk_header = buf.maybe_extract_next_line() - if chunk_header is None: - return None - matches = validate( - chunk_header_re, - chunk_header, - "illegal chunk header: {!r}", - chunk_header, - ) - # XX FIXME: we discard chunk extensions. Does anyone care? 
- self._bytes_in_chunk = int(matches["chunk_size"], base=16) - if self._bytes_in_chunk == 0: - self._reading_trailer = True - return self(buf) - chunk_start = True - else: - chunk_start = False - assert self._bytes_in_chunk > 0 - data = buf.maybe_extract_at_most(self._bytes_in_chunk) - if data is None: - return None - self._bytes_in_chunk -= len(data) - if self._bytes_in_chunk == 0: - self._bytes_to_discard = b"\r\n" - chunk_end = True - else: - chunk_end = False - return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(incomplete chunked read)" - ) - - -class Http10Reader: - def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: - data = buf.maybe_extract_at_most(999999999) - if data is None: - return None - return Data(data=data) - - def read_eof(self) -> EndOfMessage: - return EndOfMessage() - - -def expect_nothing(buf: ReceiveBuffer) -> None: - if buf: - raise LocalProtocolError("Got data when expecting EOF") - return None - - -ReadersType = Dict[ - Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], - Union[Callable[..., Any], Dict[str, Callable[..., Any]]], -] - -READERS: ReadersType = { - (CLIENT, IDLE): maybe_read_from_IDLE_client, - (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, - (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, - (CLIENT, DONE): expect_nothing, - (CLIENT, MUST_CLOSE): expect_nothing, - (CLIENT, CLOSED): expect_nothing, - (SERVER, DONE): expect_nothing, - (SERVER, MUST_CLOSE): expect_nothing, - (SERVER, CLOSED): expect_nothing, - SEND_BODY: { - "chunked": ChunkedReader, - "content-length": ContentLengthReader, - "http/1.0": Http10Reader, - }, -} diff --git a/venv/lib/python3.12/site-packages/h11/_receivebuffer.py b/venv/lib/python3.12/site-packages/h11/_receivebuffer.py deleted file mode 100644 index e5c4e08a..00000000 --- a/venv/lib/python3.12/site-packages/h11/_receivebuffer.py +++ /dev/null @@ -1,153 +0,0 @@ -import re -import sys -from typing import List, Optional, Union - -__all__ = ["ReceiveBuffer"] - - -# Operations we want to support: -# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), -# or wait until there is one -# - read at-most-N bytes -# Goals: -# - on average, do this fast -# - worst case, do this in O(n) where n is the number of bytes processed -# Plan: -# - store bytearray, offset, how far we've searched for a separator token -# - use the how-far-we've-searched data to avoid rescanning -# - while doing a stream of uninterrupted processing, advance offset instead -# of constantly copying -# WARNING: -# - I haven't benchmarked or profiled any of this yet. -# -# Note that starting in Python 3.4, deleting the initial n bytes from a -# bytearray is amortized O(n), thanks to some excellent work by Antoine -# Martin: -# -# https://bugs.python.org/issue19087 -# -# This means that if we only supported 3.4+, we could get rid of the code here -# involving self._start and self.compress, because it's doing exactly the same -# thing that bytearray now does internally. -# -# BUT unfortunately, we still support 2.7, and reading short segments out of a -# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually -# delete this code. 
Yet: -# -# https://pythonclock.org/ -# -# (Two things to double-check first though: make sure PyPy also has the -# optimization, and benchmark to make sure it's a win, since we do have a -# slightly clever thing where we delay calling compress() until we've -# processed a whole event, which could in theory be slightly more efficient -# than the internal bytearray support.) -blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) - - -class ReceiveBuffer: - def __init__(self) -> None: - self._data = bytearray() - self._next_line_search = 0 - self._multiple_lines_search = 0 - - def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": - self._data += byteslike - return self - - def __bool__(self) -> bool: - return bool(len(self)) - - def __len__(self) -> int: - return len(self._data) - - # for @property unprocessed_data - def __bytes__(self) -> bytes: - return bytes(self._data) - - def _extract(self, count: int) -> bytearray: - # extracting an initial slice of the data buffer and return it - out = self._data[:count] - del self._data[:count] - - self._next_line_search = 0 - self._multiple_lines_search = 0 - - return out - - def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: - """ - Extract a fixed number of bytes from the buffer. - """ - out = self._data[:count] - if not out: - return None - - return self._extract(count) - - def maybe_extract_next_line(self) -> Optional[bytearray]: - """ - Extract the first line, if it is completed in the buffer. - """ - # Only search in buffer space that we've not already looked at. - search_start_index = max(0, self._next_line_search - 1) - partial_idx = self._data.find(b"\r\n", search_start_index) - - if partial_idx == -1: - self._next_line_search = len(self._data) - return None - - # + 2 is to compensate len(b"\r\n") - idx = partial_idx + 2 - - return self._extract(idx) - - def maybe_extract_lines(self) -> Optional[List[bytearray]]: - """ - Extract everything up to the first blank line, and return a list of lines. - """ - # Handle the case where we have an immediate empty line. - if self._data[:1] == b"\n": - self._extract(1) - return [] - - if self._data[:2] == b"\r\n": - self._extract(2) - return [] - - # Only search in buffer space that we've not already looked at. - match = blank_line_regex.search(self._data, self._multiple_lines_search) - if match is None: - self._multiple_lines_search = max(0, len(self._data) - 2) - return None - - # Truncate the buffer and return it. - idx = match.span(0)[-1] - out = self._extract(idx) - lines = out.split(b"\n") - - for line in lines: - if line.endswith(b"\r"): - del line[-1] - - assert lines[-2] == lines[-1] == b"" - - del lines[-2:] - - return lines - - # In theory we should wait until `\r\n` before starting to validate - # incoming data. However it's interesting to detect (very) invalid data - # early given they might not even contain `\r\n` at all (hence only - # timeout will get rid of them). - # This is not a 100% effective detection but more of a cheap sanity check - # allowing for early abort in some useful cases. - # This is especially interesting when peer is messing up with HTTPS and - # sent us a TLS stream where we were expecting plain HTTP given all - # versions of TLS so far start handshake with a 0x16 message type code. 
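As a small concrete check of the cutoff used below (only byte values are assumed): a TLS ClientHello record begins with the handshake content type 0x16, which falls below `b"!"` (0x21), while any plausible HTTP request line starts with a printable ASCII byte:

```python
TLS_HANDSHAKE = 0x16                   # first byte of a TLS ClientHello record
HTTP_FIRST = b"GET / HTTP/1.1\r\n"[0]  # 0x47

assert TLS_HANDSHAKE < 0x21   # flagged as obviously invalid
assert HTTP_FIRST >= 0x21     # passes the cheap sanity check
```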
- def is_next_line_obviously_invalid_request_line(self) -> bool: - try: - # HTTP header line must not contain non-printable characters - # and should not start with a space - return self._data[0] < 0x21 - except IndexError: - return False diff --git a/venv/lib/python3.12/site-packages/h11/_state.py b/venv/lib/python3.12/site-packages/h11/_state.py deleted file mode 100644 index 3ad444b0..00000000 --- a/venv/lib/python3.12/site-packages/h11/_state.py +++ /dev/null @@ -1,365 +0,0 @@ -################################################################ -# The core state machine -################################################################ -# -# Rule 1: everything that affects the state machine and state transitions must -# live here in this file. As much as possible goes into the table-based -# representation, but for the bits that don't quite fit, the actual code and -# state must nonetheless live here. -# -# Rule 2: this file does not know about what role we're playing; it only knows -# about HTTP request/response cycles in the abstract. This ensures that we -# don't cheat and apply different rules to local and remote parties. -# -# -# Theory of operation -# =================== -# -# Possibly the simplest way to think about this is that we actually have 5 -# different state machines here. Yes, 5. These are: -# -# 1) The client state, with its complicated automaton (see the docs) -# 2) The server state, with its complicated automaton (see the docs) -# 3) The keep-alive state, with possible states {True, False} -# 4) The SWITCH_CONNECT state, with possible states {False, True} -# 5) The SWITCH_UPGRADE state, with possible states {False, True} -# -# For (3)-(5), the first state listed is the initial state. -# -# (1)-(3) are stored explicitly in member variables. The last -# two are stored implicitly in the pending_switch_proposals set as: -# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) -# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) -# -# And each of these machines has two different kinds of transitions: -# -# a) Event-triggered -# b) State-triggered -# -# Event triggered is the obvious thing that you'd think it is: some event -# happens, and if it's the right event at the right time then a transition -# happens. But there are somewhat complicated rules for which machines can -# "see" which events. (As a rule of thumb, if a machine "sees" an event, this -# means two things: the event can affect the machine, and if the machine is -# not in a state where it expects that event then it's an error.) These rules -# are: -# -# 1) The client machine sees all h11.events objects emitted by the client. -# -# 2) The server machine sees all h11.events objects emitted by the server. -# -# It also sees the client's Request event. -# -# And sometimes, server events are annotated with a _SWITCH_* event. For -# example, we can have a (Response, _SWITCH_CONNECT) event, which is -# different from a regular Response event. -# -# 3) The keep-alive machine sees the process_keep_alive_disabled() event -# (which is derived from Request/Response events), and this event -# transitions it from True -> False, or from False -> False. There's no way -# to transition back. -# -# 4&5) The _SWITCH_* machines transition from False->True when we get a -# Request that proposes the relevant type of switch (via -# process_client_switch_proposals), and they go from True->False when we -# get a Response that has no _SWITCH_* annotation. -# -# So that's event-triggered transitions. 
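A minimal sketch of rules 1 and 2 above, driving this file's private `ConnectionState` directly (illustrative only): a single client `Request` is seen by *both* machines, per the transition tables further down.

```python
from h11._events import Request
from h11._state import CLIENT, SERVER, SEND_BODY, SEND_RESPONSE, ConnectionState

cs = ConnectionState()
cs.process_event(CLIENT, Request)          # the client emits a Request...
assert cs.states[CLIENT] is SEND_BODY      # ...its own machine advances,
assert cs.states[SERVER] is SEND_RESPONSE  # ...and the server machine saw (Request, CLIENT)
```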
-# -# State-triggered transitions are less standard. What they do here is couple -# the machines together. The way this works is, when certain *joint* -# configurations of states are achieved, then we automatically transition to a -# new *joint* state. So, for example, if we're ever in a joint state with -# -# client: DONE -# keep-alive: False -# -# then the client state immediately transitions to: -# -# client: MUST_CLOSE -# -# This is fundamentally different from an event-based transition, because it -# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state -# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive -# transitioned True -> False. Either way, once this precondition is satisfied, -# this transition is immediately triggered. -# -# What if two conflicting state-based transitions get enabled at the same -# time? In practice there's only one case where this arises (client DONE -> -# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by -# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. -# -# Implementation -# -------------- -# -# The event-triggered transitions for the server and client machines are all -# stored explicitly in a table. Ditto for the state-triggered transitions that -# involve just the server and client state. -# -# The transitions for the other machines, and the state-triggered transitions -# that involve the other machines, are written out as explicit Python code. -# -# It'd be nice if there were some cleaner way to do all this. This isn't -# *too* terrible, but I feel like it could probably be better. -# -# WARNING -# ------- -# -# The script that generates the state machine diagrams for the docs knows how -# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS -# tables. But it can't automatically read the transitions that are written -# directly in Python code. So if you touch those, you need to also update the -# script to keep it in sync! -from typing import cast, Dict, Optional, Set, Tuple, Type, Union - -from ._events import * -from ._util import LocalProtocolError, Sentinel - -# Everything in __all__ gets re-exported as part of the h11 public API. 
-__all__ = [ - "CLIENT", - "SERVER", - "IDLE", - "SEND_RESPONSE", - "SEND_BODY", - "DONE", - "MUST_CLOSE", - "CLOSED", - "MIGHT_SWITCH_PROTOCOL", - "SWITCHED_PROTOCOL", - "ERROR", -] - - -class CLIENT(Sentinel, metaclass=Sentinel): - pass - - -class SERVER(Sentinel, metaclass=Sentinel): - pass - - -# States -class IDLE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_RESPONSE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_BODY(Sentinel, metaclass=Sentinel): - pass - - -class DONE(Sentinel, metaclass=Sentinel): - pass - - -class MUST_CLOSE(Sentinel, metaclass=Sentinel): - pass - - -class CLOSED(Sentinel, metaclass=Sentinel): - pass - - -class ERROR(Sentinel, metaclass=Sentinel): - pass - - -# Switch types -class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): - pass - - -EventTransitionType = Dict[ - Type[Sentinel], - Dict[ - Type[Sentinel], - Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], - ], -] - -EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { - CLIENT: { - IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - MIGHT_SWITCH_PROTOCOL: {}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, - SERVER: { - IDLE: { - ConnectionClosed: CLOSED, - Response: SEND_BODY, - # Special case: server sees client Request events, in this form - (Request, CLIENT): SEND_RESPONSE, - }, - SEND_RESPONSE: { - InformationalResponse: SEND_RESPONSE, - Response: SEND_BODY, - (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, - (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, - }, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, -} - -StateTransitionType = Dict[ - Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] -] - -# NB: there are also some special-case state-triggered transitions hard-coded -# into _fire_state_triggered_transitions below. -STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { - # (Client state, Server state) -> new states - # Protocol negotiation - (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, - # Socket shutdown - (CLOSED, DONE): {SERVER: MUST_CLOSE}, - (CLOSED, IDLE): {SERVER: MUST_CLOSE}, - (ERROR, DONE): {SERVER: MUST_CLOSE}, - (DONE, CLOSED): {CLIENT: MUST_CLOSE}, - (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, - (DONE, ERROR): {CLIENT: MUST_CLOSE}, -} - - -class ConnectionState: - def __init__(self) -> None: - # Extra bits of state that don't quite fit into the state model. - - # If this is False then it enables the automatic DONE -> MUST_CLOSE - # transition. Don't set this directly; call .keep_alive_disabled() - self.keep_alive = True - - # This is a subset of {UPGRADE, CONNECT}, containing the proposals - # made by the client for switching protocols. 
- self.pending_switch_proposals: Set[Type[Sentinel]] = set() - - self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} - - def process_error(self, role: Type[Sentinel]) -> None: - self.states[role] = ERROR - self._fire_state_triggered_transitions() - - def process_keep_alive_disabled(self) -> None: - self.keep_alive = False - self._fire_state_triggered_transitions() - - def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: - self.pending_switch_proposals.add(switch_event) - self._fire_state_triggered_transitions() - - def process_event( - self, - role: Type[Sentinel], - event_type: Type[Event], - server_switch_event: Optional[Type[Sentinel]] = None, - ) -> None: - _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type - if server_switch_event is not None: - assert role is SERVER - if server_switch_event not in self.pending_switch_proposals: - raise LocalProtocolError( - "Received server _SWITCH_UPGRADE event without a pending proposal" - ) - _event_type = (event_type, server_switch_event) - if server_switch_event is None and _event_type is Response: - self.pending_switch_proposals = set() - self._fire_event_triggered_transitions(role, _event_type) - # Special case: the server state does get to see Request - # events. - if _event_type is Request: - assert role is CLIENT - self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) - self._fire_state_triggered_transitions() - - def _fire_event_triggered_transitions( - self, - role: Type[Sentinel], - event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], - ) -> None: - state = self.states[role] - try: - new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] - except KeyError: - event_type = cast(Type[Event], event_type) - raise LocalProtocolError( - "can't handle event type {} when role={} and state={}".format( - event_type.__name__, role, self.states[role] - ) - ) from None - self.states[role] = new_state - - def _fire_state_triggered_transitions(self) -> None: - # We apply these rules repeatedly until converging on a fixed point - while True: - start_states = dict(self.states) - - # It could happen that both these special-case transitions are - # enabled at the same time: - # - # DONE -> MIGHT_SWITCH_PROTOCOL - # DONE -> MUST_CLOSE - # - # For example, this will always be true of a HTTP/1.0 client - # requesting CONNECT. If this happens, the protocol switch takes - # priority. From there the client will either go to - # SWITCHED_PROTOCOL, in which case it's none of our business when - # they close the connection, or else the server will deny the - # request, in which case the client will go back to DONE and then - # from there to MUST_CLOSE. - if self.pending_switch_proposals: - if self.states[CLIENT] is DONE: - self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL - - if not self.pending_switch_proposals: - if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: - self.states[CLIENT] = DONE - - if not self.keep_alive: - for role in (CLIENT, SERVER): - if self.states[role] is DONE: - self.states[role] = MUST_CLOSE - - # Tabular state-triggered transitions - joint_state = (self.states[CLIENT], self.states[SERVER]) - changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) - self.states.update(changes) - - if self.states == start_states: - # Fixed point reached - return - - def start_next_cycle(self) -> None: - if self.states != {CLIENT: DONE, SERVER: DONE}: - raise LocalProtocolError( - f"not in a reusable state. 
self.states={self.states}"
-            )
-        # Can't reach DONE/DONE with any of these active, but still, let's be
-        # sure.
-        assert self.keep_alive
-        assert not self.pending_switch_proposals
-        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/venv/lib/python3.12/site-packages/h11/_util.py b/venv/lib/python3.12/site-packages/h11/_util.py
deleted file mode 100644
index 67184452..00000000
--- a/venv/lib/python3.12/site-packages/h11/_util.py
+++ /dev/null
@@ -1,135 +0,0 @@
-from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
-
-__all__ = [
-    "ProtocolError",
-    "LocalProtocolError",
-    "RemoteProtocolError",
-    "validate",
-    "bytesify",
-]
-
-
-class ProtocolError(Exception):
-    """Exception indicating a violation of the HTTP/1.1 protocol.
-
-    This is an abstract base class, with two concrete subclasses:
-    :exc:`LocalProtocolError`, which indicates that you tried to do something
-    that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
-    indicates that the remote peer tried to do something that HTTP/1.1 says is
-    illegal. See :ref:`error-handling` for details.
-
-    In addition to the normal :exc:`Exception` features, it has one attribute:
-
-    .. attribute:: error_status_hint
-
-       This gives a suggestion as to what status code a server might use if
-       this error occurred as part of a request.
-
-       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
-       how you might want to respond to a misbehaving peer, if you're
-       implementing a server.
-
-       For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
-       how your peer might have responded to *you* if h11 had allowed you to
-       continue.
-
-    The default is 400 Bad Request, a generic catch-all for protocol
-    violations.
-
-    """
-
-    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
-        if type(self) is ProtocolError:
-            raise TypeError("tried to directly instantiate ProtocolError")
-        Exception.__init__(self, msg)
-        self.error_status_hint = error_status_hint
-
-
-# Strategy: there are a number of public APIs where a LocalProtocolError can
-# be raised (send(), all the different event constructors, ...), and only one
-# public API where RemoteProtocolError can be raised
-# (receive_data()). Therefore we always raise LocalProtocolError internally,
-# and then receive_data will translate this into a RemoteProtocolError.
-#
-# Internally:
-#   LocalProtocolError is the generic "ProtocolError".
-# Externally:
-#   LocalProtocolError is for local errors and RemoteProtocolError is for
-#   remote errors.
-class LocalProtocolError(ProtocolError):
-    def _reraise_as_remote_protocol_error(self) -> NoReturn:
-        # After catching a LocalProtocolError, use this method to re-raise it
-        # as a RemoteProtocolError. This method must be called from inside an
-        # except: block.
-        #
-        # An easy way to get an equivalent RemoteProtocolError is just to
-        # modify 'self' in place.
-        self.__class__ = RemoteProtocolError  # type: ignore
-        # But the re-raising is somewhat non-trivial -- you might think that
-        # now that we've modified the in-flight exception object, that just
-        # doing 'raise' to re-raise it would be enough. But it turns out that
-        # this doesn't work, because Python tracks the exception type
-        # (exc_info[0]) separately from the exception object (exc_info[1]),
-        # and we only modified the latter. So we really do need to re-raise
-        # the new type explicitly.
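The same trick in isolation, with hypothetical `Local`/`Remote` names (a sketch, not h11's code): mutate `__class__` in place, then re-raise the object explicitly so the new type takes effect.

```python
class Local(Exception): pass
class Remote(Exception): pass  # same layout, so __class__ assignment is legal

def demo():
    try:
        raise Local("boom")
    except Local as exc:
        exc.__class__ = Remote  # in-place conversion; args and traceback survive
        raise exc               # explicit re-raise publishes the new type

try:
    demo()
except Remote as caught:
    print(type(caught).__name__, caught)  # -> Remote boom
```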
-        # On py3, the traceback is part of the exception object, so our
-        # in-place modification preserved it and we can just re-raise:
-        raise self
-
-
-class RemoteProtocolError(ProtocolError):
-    pass
-
-
-def validate(
-    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
-) -> Dict[str, bytes]:
-    match = regex.fullmatch(data)
-    if not match:
-        if format_args:
-            msg = msg.format(*format_args)
-        raise LocalProtocolError(msg)
-    return match.groupdict()
-
-
-# Sentinel values
-#
-# - Inherit identity-based comparison and hashing from object
-# - Have a nice repr
-# - Have a *bonus property*: type(sentinel) is sentinel
-#
-# The bonus property is useful if you want to take the return value from
-# next_event() and do some sort of dispatch based on type(event).
-
-_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
-
-
-class Sentinel(type):
-    def __new__(
-        cls: Type[_T_Sentinel],
-        name: str,
-        bases: Tuple[type, ...],
-        namespace: Dict[str, Any],
-        **kwds: Any
-    ) -> _T_Sentinel:
-        assert bases == (Sentinel,)
-        v = super().__new__(cls, name, bases, namespace, **kwds)
-        v.__class__ = v  # type: ignore
-        return v
-
-    def __repr__(self) -> str:
-        return self.__name__
-
-
-# Used for methods, request targets, HTTP versions, header names, and header
-# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
-# returns bytes.
-def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
-    # Fast-path:
-    if type(s) is bytes:
-        return s
-    if isinstance(s, str):
-        s = s.encode("ascii")
-    if isinstance(s, int):
-        raise TypeError("expected bytes-like object, not int")
-    return bytes(s)
diff --git a/venv/lib/python3.12/site-packages/h11/_version.py b/venv/lib/python3.12/site-packages/h11/_version.py
deleted file mode 100644
index 76e7327b..00000000
--- a/venv/lib/python3.12/site-packages/h11/_version.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# This file must be kept very simple, because it is consumed from several
-# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
-
-# We use a simple scheme:
-#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
-# where the +dev versions are never released into the wild, they're just what
-# we stick into the VCS in between releases.
-#
-# This is compatible with PEP 440:
-#   http://legacy.python.org/dev/peps/pep-0440/
-# via the use of the "local suffix" "+dev", which is disallowed on index
-# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
-# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
-# 1.0.0.)
-
-__version__ = "0.16.0"
diff --git a/venv/lib/python3.12/site-packages/h11/_writers.py b/venv/lib/python3.12/site-packages/h11/_writers.py
deleted file mode 100644
index 939cdb91..00000000
--- a/venv/lib/python3.12/site-packages/h11/_writers.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Code to write HTTP data
-#
-# Strategy: each writer takes an event + a write-some-bytes function, which it
-# calls.
-#
-# WRITERS is a dict describing how to pick a writer. It maps states to either:
-# - a writer
-# - or, for body writers, a dict of framing-dependent writer factories
-
-from typing import Any, Callable, Dict, List, Tuple, Type, Union
-
-from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
-from ._headers import Headers
-from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
-from ._util import LocalProtocolError, Sentinel
-
-__all__ = ["WRITERS"]
-
-Writer = Callable[[bytes], Any]
-
-
-def write_headers(headers: Headers, write: Writer) -> None:
-    # "Since the Host field-value is critical information for handling a
-    # request, a user agent SHOULD generate Host as the first header field
-    # following the request-line." - RFC 7230
-    raw_items = headers._full_items
-    for raw_name, name, value in raw_items:
-        if name == b"host":
-            write(b"%s: %s\r\n" % (raw_name, value))
-    for raw_name, name, value in raw_items:
-        if name != b"host":
-            write(b"%s: %s\r\n" % (raw_name, value))
-    write(b"\r\n")
-
-
-def write_request(request: Request, write: Writer) -> None:
-    if request.http_version != b"1.1":
-        raise LocalProtocolError("I only send HTTP/1.1")
-    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
-    write_headers(request.headers, write)
-
-
-# Shared between InformationalResponse and Response
-def write_any_response(
-    response: Union[InformationalResponse, Response], write: Writer
-) -> None:
-    if response.http_version != b"1.1":
-        raise LocalProtocolError("I only send HTTP/1.1")
-    status_bytes = str(response.status_code).encode("ascii")
-    # We don't bother sending ascii status messages like "OK"; they're
-    # optional and ignored by the protocol. (But the space after the numeric
-    # status code is mandatory.)
-    #
-    # XX FIXME: could at least make an effort to pull out the status message
-    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
-    # (either by import or copy/paste). We already accept them as status codes
-    # since they're of type IntEnum < int.
-    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
-    write_headers(response.headers, write)
-
-
-class BodyWriter:
-    def __call__(self, event: Event, write: Writer) -> None:
-        if type(event) is Data:
-            self.send_data(event.data, write)
-        elif type(event) is EndOfMessage:
-            self.send_eom(event.headers, write)
-        else:  # pragma: no cover
-            assert False
-
-    def send_data(self, data: bytes, write: Writer) -> None:
-        pass
-
-    def send_eom(self, headers: Headers, write: Writer) -> None:
-        pass
-
-
-#
-# These are all careful not to do anything to 'data' except call len(data) and
-# write(data). This allows us to transparently pass-through funny objects,
-# like placeholder objects referring to files on disk that will be sent via
-# sendfile(2).
-#
-class ContentLengthWriter(BodyWriter):
-    def __init__(self, length: int) -> None:
-        self._length = length
-
-    def send_data(self, data: bytes, write: Writer) -> None:
-        self._length -= len(data)
-        if self._length < 0:
-            raise LocalProtocolError("Too much data for declared Content-Length")
-        write(data)
-
-    def send_eom(self, headers: Headers, write: Writer) -> None:
-        if self._length != 0:
-            raise LocalProtocolError("Too little data for declared Content-Length")
-        if headers:
-            raise LocalProtocolError("Content-Length and trailers don't mix")
-
-
-class ChunkedWriter(BodyWriter):
-    def send_data(self, data: bytes, write: Writer) -> None:
-        # if we encoded 0-length data in the naive way, it would look like an
-        # end-of-message.
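Concretely, the wire format this produces (a hand-rolled illustration of the `write()` calls below, not h11 API): a hex length line, the payload, a CRLF, and finally a zero-length chunk from `send_eom()`.

```python
out = []
write = out.append

data = b"hello"
write(b"%x\r\n" % len(data))  # b"5\r\n"
write(data)
write(b"\r\n")
write(b"0\r\n")               # send_eom() then appends any trailers + b"\r\n"

assert b"".join(out) == b"5\r\nhello\r\n0\r\n"
```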
- if not data: - return - write(b"%x\r\n" % len(data)) - write(data) - write(b"\r\n") - - def send_eom(self, headers: Headers, write: Writer) -> None: - write(b"0\r\n") - write_headers(headers, write) - - -class Http10Writer(BodyWriter): - def send_data(self, data: bytes, write: Writer) -> None: - write(data) - - def send_eom(self, headers: Headers, write: Writer) -> None: - if headers: - raise LocalProtocolError("can't send trailers to HTTP/1.0 client") - # no need to close the socket ourselves, that will be taken care of by - # Connection: close machinery - - -WritersType = Dict[ - Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], - Union[ - Dict[str, Type[BodyWriter]], - Callable[[Union[InformationalResponse, Response], Writer], None], - Callable[[Request, Writer], None], - ], -] - -WRITERS: WritersType = { - (CLIENT, IDLE): write_request, - (SERVER, IDLE): write_any_response, - (SERVER, SEND_RESPONSE): write_any_response, - SEND_BODY: { - "chunked": ChunkedWriter, - "content-length": ContentLengthWriter, - "http/1.0": Http10Writer, - }, -} diff --git a/venv/lib/python3.12/site-packages/h11/py.typed b/venv/lib/python3.12/site-packages/h11/py.typed deleted file mode 100644 index f5642f79..00000000 --- a/venv/lib/python3.12/site-packages/h11/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA deleted file mode 100644 index 8056834e..00000000 --- a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/METADATA +++ /dev/null @@ -1,625 +0,0 @@ -Metadata-Version: 2.4 -Name: httpcore -Version: 1.0.9 -Summary: A minimal low-level HTTP client. 
-Project-URL: Documentation, https://www.encode.io/httpcore -Project-URL: Homepage, https://www.encode.io/httpcore/ -Project-URL: Source, https://github.com/encode/httpcore -Author-email: Tom Christie -License-Expression: BSD-3-Clause -License-File: LICENSE.md -Classifier: Development Status :: 3 - Alpha -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: certifi -Requires-Dist: h11>=0.16 -Provides-Extra: asyncio -Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio' -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Provides-Extra: trio -Requires-Dist: trio<1.0,>=0.22.0; extra == 'trio' -Description-Content-Type: text/markdown - -# HTTP Core - -[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) -[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) - -> *Do one thing, and do it well.* - -The HTTP Core package provides a minimal low-level HTTP client, which does -one thing only. Sending HTTP requests. - -It does not provide any high level model abstractions over the API, -does not handle redirects, multipart uploads, building authentication headers, -transparent HTTP caching, URL parsing, session cookie handling, -content or charset decoding, handling JSON, environment based configuration -defaults, or any of that Jazz. - -Some things HTTP Core does do: - -* Sending HTTP requests. -* Thread-safe / task-safe connection pooling. -* HTTP(S) proxy & SOCKS proxy support. -* Supports HTTP/1.1 and HTTP/2. -* Provides both sync and async interfaces. -* Async backend support for `asyncio` and `trio`. - -## Requirements - -Python 3.8+ - -## Installation - -For HTTP/1.1 only support, install with: - -```shell -$ pip install httpcore -``` - -There are also a number of optional extras available... - -```shell -$ pip install httpcore['asyncio,trio,http2,socks'] -``` - -## Sending requests - -Send an HTTP request: - -```python -import httpcore - -response = httpcore.request("GET", "https://www.example.com/") - -print(response) -# -print(response.status) -# 200 -print(response.headers) -# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] -print(response.content) -# b'\n\n\nExample Domain\n\n\n ...' -``` - -The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. - -```python -import httpcore - -http = httpcore.ConnectionPool() -response = http.request("GET", "https://www.example.com/") -``` - -Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). - -## Motivation - -You *probably* don't want to be using HTTP Core directly. 
It might make sense if -you're writing something like a proxy service in Python, and you just want -something at the lowest possible level, but more typically you'll want to use -a higher level client library, such as `httpx`. - -The motivation for `httpcore` is: - -* To provide a reusable low-level client library, that other packages can then build on top of. -* To provide a *really clear interface split* between the networking code and client logic, - so that each is easier to understand and reason about in isolation. - -## Dependencies - -The `httpcore` package has the following dependencies... - -* `h11` -* `certifi` - -And the following optional extras... - -* `anyio` - Required by `pip install httpcore['asyncio']`. -* `trio` - Required by `pip install httpcore['trio']`. -* `h2` - Required by `pip install httpcore['http2']`. -* `socksio` - Required by `pip install httpcore['socks']`. - -## Versioning - -We use [SEMVER for our versioning policy](https://semver.org/). - -For changes between package versions please see our [project changelog](CHANGELOG.md). - -We recommend pinning your requirements either the most current major version, or a more specific version range: - -```python -pip install 'httpcore==1.*' -``` -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## Version 1.0.9 (April 24th, 2025) - -- Resolve https://github.com/advisories/GHSA-vqfr-h8mv-ghfj with h11 dependency update. (#1008) - -## Version 1.0.8 (April 11th, 2025) - -- Fix `AttributeError` when importing on Python 3.14. (#1005) - -## Version 1.0.7 (November 15th, 2024) - -- Support `proxy=…` configuration on `ConnectionPool()`. (#974) - -## Version 1.0.6 (October 1st, 2024) - -- Relax `trio` dependency pinning. (#956) -- Handle `trio` raising `NotImplementedError` on unsupported platforms. (#955) -- Handle mapping `ssl.SSLError` to `httpcore.ConnectError`. (#918) - -## 1.0.5 (March 27th, 2024) - -- Handle `EndOfStream` exception for anyio backend. (#899) -- Allow trio `0.25.*` series in package dependancies. (#903) - -## 1.0.4 (February 21st, 2024) - -- Add `target` request extension. (#888) -- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882) - -## 1.0.3 (February 13th, 2024) - -- Fix support for async cancellations. (#880) -- Fix trace extension when used with socks proxy. (#849) -- Fix SSL context for connections using the "wss" scheme (#869) - -## 1.0.2 (November 10th, 2023) - -- Fix `float("inf")` timeouts in `Event.wait` function. (#846) - -## 1.0.1 (November 3rd, 2023) - -- Fix pool timeout to account for the total time spent retrying. (#823) -- Raise a neater RuntimeError when the correct async deps are not installed. (#826) -- Add support for synchronous TLS-in-TLS streams. (#840) - -## 1.0.0 (October 6th, 2023) - -From version 1.0 our async support is now optional, as the package has minimal dependencies by default. - -For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`. - -The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/. - -- Async support becomes fully optional. (#809) -- Add support for Python 3.12. (#807) - -## 0.18.0 (September 8th, 2023) - -- Add support for HTTPS proxies. (#745, #786) -- Drop Python 3.7 support. (#727) -- Handle `sni_hostname` extension with SOCKS proxy. (#774) -- Handle HTTP/1.1 half-closed connections gracefully. 
(#641) -- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762) - -## 0.17.3 (July 5th, 2023) - -- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) -- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation detail are now part of the top-level public API. (#699) -- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) -- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) -- Drop Python 3.7 support. (#727) - -## 0.17.2 (May 23th, 2023) - -- Add `socket_options` argument to `ConnectionPool` and `HTTProxy` classes. (#668) -- Improve logging with per-module logger names. (#690) -- Add `sni_hostname` request extension. (#696) -- Resolve race condition during import of `anyio` package. (#692) -- Enable TCP_NODELAY for all synchronous sockets. (#651) - -## 0.17.1 (May 17th, 2023) - -- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) -- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) -- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) -- Fix edge-case exception when removing requests from the connection pool. (#680) -- Fix pool timeout edge-case. (#688) - -## 0.17.0 (March 16th, 2023) - -- Add DEBUG level logging. (#648) -- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) -- Increase the allowable HTTP header size to 100kB. (#647) -- Add `retries` option to SOCKS proxy classes. (#643) - -## 0.16.3 (December 20th, 2022) - -- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) -- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637) -- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631) -- Lazy import `anyio`, so that it's no longer a hard dependancy, and isn't imported if unused. (#639) - -## 0.16.2 (November 25th, 2022) - -- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) -- Raise `RuntimeError` if attempting to us UNIX domain sockets on Windows. (#619) - -## 0.16.1 (November 17th, 2022) - -- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) - -## 0.16.0 (October 11th, 2022) - -- Support HTTP/1.1 informational responses. (#581) -- Fix async cancellation behaviour. (#580) -- Support `h11` 0.14. (#579) - -## 0.15.0 (May 17th, 2022) - -- Drop Python 3.6 support (#535) -- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) -- Switch to explicit `typing.Optional` for type hints. (#513) -- For `trio` map OSError exceptions to `ConnectError`. (#543) - -## 0.14.7 (February 4th, 2022) - -- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) -- Fix AttributeError that happened when Socks5Connection were terminated. (#501) - -## 0.14.6 (February 1st, 2022) - -- Fix SOCKS support for `http://` URLs. (#492) -- Resolve race condition around exceptions during streaming a response. 
(#491) - -## 0.14.5 (January 18th, 2022) - -- SOCKS proxy support. (#478) -- Add proxy_auth argument to HTTPProxy. (#481) -- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) - -## 0.14.4 (January 5th, 2022) - -- Support HTTP/2 on HTTPS tunnelling proxies. (#468) -- Fix proxy headers missing on HTTP forwarding. (#456) -- Only instantiate SSL context if required. (#457) -- More robust HTTP/2 handling. (#253, #439, #440, #441) - -## 0.14.3 (November 17th, 2021) - -- Fix race condition when removing closed connections from the pool. (#437) - -## 0.14.2 (November 16th, 2021) - -- Failed connections no longer remain in the pool. (Pull #433) - -## 0.14.1 (November 12th, 2021) - -- `max_connections` becomes optional. (Pull #429) -- `certifi` is now included in the install dependancies. (Pull #428) -- `h2` is now strictly optional. (Pull #428) - -## 0.14.0 (November 11th, 2021) - -The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. - -Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. - -See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. - -There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to... - -* Log the point at which the connection is established, and the IP/port on which it is made. -* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) -* Log SSL version info / certificate info. - -Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not. - -## 0.13.7 (September 13th, 2021) - -- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403) - -## 0.13.6 (June 15th, 2021) - -### Fixed - -- Close sockets when read or write timeouts occur. (Pull #365) - -## 0.13.5 (June 14th, 2021) - -### Fixed - -- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362) - -## 0.13.4 (June 9th, 2021) - -### Added - -- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354) - -### Fixed - -- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511). - -## 0.13.3 (May 6th, 2021) - -### Added - -- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333) - -### Fixed - -- Handle cases where environment does not provide `select.poll` support. (Pull #331) - -## 0.13.2 (April 29th, 2021) - -### Added - -- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313) - -## 0.13.1 (April 28th, 2021) - -### Fixed - -- More resiliant testing for closed connections. (Pull #311) -- Don't raise exceptions on ungraceful connection closes. 
(Pull #310)
-
-## 0.13.0 (April 21st, 2021)
-
-The 0.13 release updates the core API in order to match the HTTPX Transport API,
-introduced in HTTPX 0.18 onwards.
-
-An example of making requests with the new interface is:
-
-```python
-with httpcore.SyncConnectionPool() as http:
-    status_code, headers, stream, extensions = http.handle_request(
-        method=b'GET',
-        url=(b'https', b'example.org', 443, b'/'),
-        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
-        stream=httpcore.ByteStream(b''),
-        extensions={}
-    )
-    body = stream.read()
-    print(status_code, body)
-```
-
-### Changed
-
-- The `.request()` method is now `handle_request()`. (Pull #296)
-- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
-- The `headers` argument is no longer optional. (Pull #296)
-- The `stream` argument is no longer optional. (Pull #296)
-- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
-- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
-- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
-- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
-
-### Added
-
-- Streams now support a `.read()` interface. (Pull #296)
-
-### Fixed
-
-- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
-
-## 0.12.3 (December 7th, 2020)
-
-### Fixed
-
-- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
-- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
-- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
-- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
-- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
-
-## 0.12.2 (November 20th, 2020)
-
-### Fixed
-
-- Properly wrap connect errors on the asyncio backend. (Pull #235)
-- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
-
-## 0.12.1 (November 7th, 2020)
-
-### Added
-
-- Add connect retries. (Pull #221)
-
-### Fixed
-
-- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
-- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
-- Properly wrap OS errors when using `trio`. (Pull #225)
-
-## 0.12.0 (October 6th, 2020)
-
-### Changed
-
-- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
-
-### Added
-
-- Add Python 3.9 to officially supported versions.
-
-### Fixed
-
-- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
-
-## 0.11.1 (September 28th, 2020)
-
-### Fixed
-
-- Add await to async semaphore release() coroutine (#197)
-- Drop incorrect curio classifier (#192)
-
-## 0.11.0 (September 22nd, 2020)
-
-The Transport API with 0.11.0 has a couple of significant changes.
-
-Firstly we've changed the request interface in order to allow extensions, which will later enable us to support features
-such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
- -The interface changes from: - -```python -def request(method, url, headers, stream, timeout): - return (http_version, status_code, reason, headers, stream) -``` - -To instead including an optional dictionary of extensions on the request and response: - -```python -def request(method, url, headers, stream, ext): - return (status_code, headers, stream, ext) -``` - -Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes. - -In particular: - -* Trailing headers support. -* HTTP/2 Server Push -* sendfile. -* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming. -* Exposing debug information out of the API, including template name, template context. - -Currently extensions are limited to: - -* request: `timeout` - Optional. Timeout dictionary. -* response: `http_version` - Optional. Include the HTTP version used on the response. -* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*. - -See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this. - -Secondly, the async version of `request` is now namespaced as `arequest`. - -This allows concrete transports to support both sync and async implementations on the same class. - -### Added - -- Add curio support. (Pull #168) -- Add anyio support, with `backend="anyio"`. (Pull #169) - -### Changed - -- Update the Transport API to use 'ext' for optional extensions. (Pull #190) -- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189) - -## 0.10.2 (August 20th, 2020) - -### Added - -- Added Unix Domain Socket support. (Pull #139) - -### Fixed - -- Always include the port on proxy CONNECT requests. (Pull #154) -- Fix `max_keepalive_connections` configuration. (Pull #153) -- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164) - -## 0.10.1 (August 7th, 2020) - -- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes. - -## 0.10.0 (August 7th, 2020) - -The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional. - -Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support. - -### Added - -- HTTP/2 support becomes optional. (Pull #121, #130) -- Add `local_address=...` support. (Pull #100, #134) -- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteSteam` and `SyncByteStream` classes are now pure interface classes. (#133) -- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129) -- Add `UnsupportedProtocol` exception. (Pull #128) -- Add `.get_connection_info()` method. (Pull #102, #137) -- Add better TRACE logs. (Pull #101) - -### Changed - -- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140) - -### Fixed - -- Improve handling of server disconnects. (Pull #112) - -## 0.9.1 (May 27th, 2020) - -### Fixed - -- Proper host resolution for sync case, including IPv6 support. (Pull #97) -- Close outstanding connections when connection pool is closed. (Pull #98) - -## 0.9.0 (May 21th, 2020) - -### Changed - -- URL port becomes an `Optional[int]` instead of `int`. (Pull #92) - -### Fixed - -- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90) -- Remove incorrect debug log. 
(Pull #83) - -## 0.8.4 (May 11th, 2020) - -### Added - -- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables -and TRACE level logging. (Pull #79) - -### Fixed - -- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) - -## 0.8.3 (May 6rd, 2020) - -### Fixed - -- Include `Host` and `Accept` headers on proxy "CONNECT" requests. -- De-duplicate any headers also contained in proxy_headers. -- HTTP/2 flag not being passed down to proxy connections. - -## 0.8.2 (May 3rd, 2020) - -### Fixed - -- Fix connections using proxy forwarding requests not being added to the -connection pool properly. (Pull #70) - -## 0.8.1 (April 30th, 2020) - -### Changed - -- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. - -## 0.8.0 (April 30th, 2020) - -### Fixed - -- Fixed tunnel proxy support. - -### Added - -- New `TimeoutException` base class. - -## 0.7.0 (March 5th, 2020) - -- First integration with HTTPX. diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD deleted file mode 100644 index 44d4769e..00000000 --- a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/RECORD +++ /dev/null @@ -1,68 +0,0 @@ -httpcore-1.0.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpcore-1.0.9.dist-info/METADATA,sha256=_i1P2mGZEol4d54M8n88BFxTGGP83Zh-rMdPOhjUHCE,21529 -httpcore-1.0.9.dist-info/RECORD,, -httpcore-1.0.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 -httpcore-1.0.9.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 -httpcore/__init__.py,sha256=9kT_kqChCCJUTHww24ZmR_ezcdbpRYWksD-gYNzkZP8,3445 -httpcore/__pycache__/__init__.cpython-312.pyc,, -httpcore/__pycache__/_api.cpython-312.pyc,, -httpcore/__pycache__/_exceptions.cpython-312.pyc,, -httpcore/__pycache__/_models.cpython-312.pyc,, -httpcore/__pycache__/_ssl.cpython-312.pyc,, -httpcore/__pycache__/_synchronization.cpython-312.pyc,, -httpcore/__pycache__/_trace.cpython-312.pyc,, -httpcore/__pycache__/_utils.cpython-312.pyc,, -httpcore/_api.py,sha256=unZmeDschBWCGCPCwkS3Wot9euK6bg_kKxLtGTxw214,3146 -httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 -httpcore/_async/__pycache__/__init__.cpython-312.pyc,, -httpcore/_async/__pycache__/connection.cpython-312.pyc,, -httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,, -httpcore/_async/__pycache__/http11.cpython-312.pyc,, -httpcore/_async/__pycache__/http2.cpython-312.pyc,, -httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,, -httpcore/_async/__pycache__/interfaces.cpython-312.pyc,, -httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,, -httpcore/_async/connection.py,sha256=6OcPXqMEfc0BU38_-iHUNDd1vKSTc2UVT09XqNb_BOk,8449 -httpcore/_async/connection_pool.py,sha256=DOIQ2s2ZCf9qfwxhzMprTPLqCL8OxGXiKF6qRHxvVyY,17307 -httpcore/_async/http11.py,sha256=-qM9bV7PjSQF5vxs37-eUXOIFwbIjPcZbNliuX9TtBw,13880 -httpcore/_async/http2.py,sha256=azX1fcmtXaIwjputFlZ4vd92J8xwjGOa9ax9QIv4394,23936 -httpcore/_async/http_proxy.py,sha256=2zVkrlv-Ds-rWGaqaXlrhEJiAQFPo23BT3Gq_sWoBXU,14701 -httpcore/_async/interfaces.py,sha256=jTiaWL83pgpGC9ziv90ZfwaKNMmHwmOalzaKiuTxATo,4455 -httpcore/_async/socks_proxy.py,sha256=lLKgLlggPfhFlqi0ODeBkOWvt9CghBBUyqsnsU1tx6Q,13841 -httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpcore/_backends/__pycache__/__init__.cpython-312.pyc,, 
-httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, -httpcore/_backends/__pycache__/auto.cpython-312.pyc,, -httpcore/_backends/__pycache__/base.cpython-312.pyc,, -httpcore/_backends/__pycache__/mock.cpython-312.pyc,, -httpcore/_backends/__pycache__/sync.cpython-312.pyc,, -httpcore/_backends/__pycache__/trio.cpython-312.pyc,, -httpcore/_backends/anyio.py,sha256=x8PgEhXRC8bVqsdzk_YJx8Y6d9Tub06CuUSwnbmtqoY,5252 -httpcore/_backends/auto.py,sha256=zO136PKZmsaTDK-HRk84eA-MUg8_2wJf4NvmK432Aio,1662 -httpcore/_backends/base.py,sha256=aShgRdZnMmRhFWHetjumlM73f8Kz1YOAyCUP_4kHslA,3042 -httpcore/_backends/mock.py,sha256=er9T436uSe7NLrfiLa4x6Nuqg5ivQ693CxWYCWsgbH4,4077 -httpcore/_backends/sync.py,sha256=bhE4d9iK9Umxdsdsgm2EfKnXaBms2WggGYU-7jmUujU,7977 -httpcore/_backends/trio.py,sha256=LHu4_Mr5MswQmmT3yE4oLgf9b_JJfeVS4BjDxeJc7Ro,5996 -httpcore/_exceptions.py,sha256=looCKga3_YVYu3s-d3L9RMPRJyhsY7fiuuGxvkOD0c0,1184 -httpcore/_models.py,sha256=IO2CcXcdpovRcLTdGFGB6RyBZdEm2h_TOmoCc4rEKho,17623 -httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 -httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 -httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, -httpcore/_sync/__pycache__/connection.cpython-312.pyc,, -httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, -httpcore/_sync/__pycache__/http11.cpython-312.pyc,, -httpcore/_sync/__pycache__/http2.cpython-312.pyc,, -httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, -httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, -httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, -httpcore/_sync/connection.py,sha256=9exGOb3PB-Mp2T1-sckSeL2t-tJ_9-NXomV8ihmWCgU,8238 -httpcore/_sync/connection_pool.py,sha256=a-T8LTsUxc7r0Ww1atfHSDoWPjQ0fA8Ul7S3-F0Mj70,16955 -httpcore/_sync/http11.py,sha256=IFobD1Md5JFlJGKWnh1_Q3epikUryI8qo09v8MiJIEA,13476 -httpcore/_sync/http2.py,sha256=AxU4yhcq68Bn5vqdJYtiXKYUj7nvhYbxz3v4rT4xnvA,23400 -httpcore/_sync/http_proxy.py,sha256=_al_6crKuEZu2wyvu493RZImJdBJnj5oGKNjLOJL2Zo,14463 -httpcore/_sync/interfaces.py,sha256=snXON42vUDHO5JBJvo8D4VWk2Wat44z2OXXHDrjbl94,4344 -httpcore/_sync/socks_proxy.py,sha256=zegZW9Snqj2_992DFJa8_CppOVBkVL4AgwduRkStakQ,13614 -httpcore/_synchronization.py,sha256=zSi13mAColBnknjZBknUC6hKNDQT4C6ijnezZ-r0T2s,9434 -httpcore/_trace.py,sha256=ck6ZoIzYTkdNAIfq5MGeKqBXDtqjOX-qfYwmZFbrGco,3952 -httpcore/_utils.py,sha256=_RLgXYOAYC350ikALV59GZ68IJrdocRZxPs9PjmzdFY,1537 -httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL deleted file mode 100644 index 12228d41..00000000 --- a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.27.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md deleted file mode 100644 index 311b2b56..00000000 --- a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,27 +0,0 @@ -Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/httpcore/__init__.py b/venv/lib/python3.12/site-packages/httpcore/__init__.py deleted file mode 100644 index 9a92dc4a..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/__init__.py +++ /dev/null @@ -1,141 +0,0 @@ -from ._api import request, stream -from ._async import ( - AsyncConnectionInterface, - AsyncConnectionPool, - AsyncHTTP2Connection, - AsyncHTTP11Connection, - AsyncHTTPConnection, - AsyncHTTPProxy, - AsyncSOCKSProxy, -) -from ._backends.base import ( - SOCKET_OPTION, - AsyncNetworkBackend, - AsyncNetworkStream, - NetworkBackend, - NetworkStream, -) -from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream -from ._backends.sync import SyncBackend -from ._exceptions import ( - ConnectError, - ConnectionNotAvailable, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from ._models import URL, Origin, Proxy, Request, Response -from ._ssl import default_ssl_context -from ._sync import ( - ConnectionInterface, - ConnectionPool, - HTTP2Connection, - HTTP11Connection, - HTTPConnection, - HTTPProxy, - SOCKSProxy, -) - -# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. -try: - from ._backends.anyio import AnyIOBackend -except ImportError: # pragma: nocover - - class AnyIOBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = ( - "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." - ) - raise RuntimeError(msg) - - -# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. -try: - from ._backends.trio import TrioBackend -except ImportError: # pragma: nocover - - class TrioBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
- raise RuntimeError(msg) - - -__all__ = [ - # top-level requests - "request", - "stream", - # models - "Origin", - "URL", - "Request", - "Response", - "Proxy", - # async - "AsyncHTTPConnection", - "AsyncConnectionPool", - "AsyncHTTPProxy", - "AsyncHTTP11Connection", - "AsyncHTTP2Connection", - "AsyncConnectionInterface", - "AsyncSOCKSProxy", - # sync - "HTTPConnection", - "ConnectionPool", - "HTTPProxy", - "HTTP11Connection", - "HTTP2Connection", - "ConnectionInterface", - "SOCKSProxy", - # network backends, implementations - "SyncBackend", - "AnyIOBackend", - "TrioBackend", - # network backends, mock implementations - "AsyncMockBackend", - "AsyncMockStream", - "MockBackend", - "MockStream", - # network backends, interface - "AsyncNetworkStream", - "AsyncNetworkBackend", - "NetworkStream", - "NetworkBackend", - # util - "default_ssl_context", - "SOCKET_OPTION", - # exceptions - "ConnectionNotAvailable", - "ProxyError", - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", - "UnsupportedProtocol", - "TimeoutException", - "PoolTimeout", - "ConnectTimeout", - "ReadTimeout", - "WriteTimeout", - "NetworkError", - "ConnectError", - "ReadError", - "WriteError", -] - -__version__ = "1.0.9" - - -__locals = locals() -for __name in __all__: - # Exclude SOCKET_OPTION, it causes AttributeError on Python 3.14 - if not __name.startswith(("__", "SOCKET_OPTION")): - setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 683dd470..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc deleted file mode 100644 index 52ee1d5e..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc deleted file mode 100644 index d5041545..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc deleted file mode 100644 index 7c226b89..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc deleted file mode 100644 index 7955047f..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc deleted file mode 100644 index 3841d8bc..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc deleted file mode 100644 index 5b5e7744..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc deleted file mode 100644 index 2ab7bc92..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_api.py b/venv/lib/python3.12/site-packages/httpcore/_api.py deleted file mode 100644 index 38b961d1..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_api.py +++ /dev/null @@ -1,94 +0,0 @@ -from __future__ import annotations - -import contextlib -import typing - -from ._models import URL, Extensions, HeaderTypes, Response -from ._sync.connection_pool import ConnectionPool - - -def request( - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.Iterator[bytes] | None = None, - extensions: Extensions | None = None, -) -> Response: - """ - Sends an HTTP request, returning the response. - - ``` - response = httpcore.request("GET", "https://www.example.com/") - ``` - - Arguments: - method: The HTTP method for the request. Typically one of `"GET"`, - `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. - url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, - or as str/bytes. - headers: The HTTP request headers. Either as a dictionary of str/bytes, - or as a list of two-tuples of str/bytes. - content: The content of the request body. Either as bytes, - or as a bytes iterator. - extensions: A dictionary of optional extra information included on the request. - Possible keys include `"timeout"`. - - Returns: - An instance of `httpcore.Response`. - """ - with ConnectionPool() as pool: - return pool.request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - - -@contextlib.contextmanager -def stream( - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.Iterator[bytes] | None = None, - extensions: Extensions | None = None, -) -> typing.Iterator[Response]: - """ - Sends an HTTP request, returning the response within a content manager. - - ``` - with httpcore.stream("GET", "https://www.example.com/") as response: - ... - ``` - - When using the `stream()` function, the body of the response will not be - automatically read. If you want to access the response body you should - either use `content = response.read()`, or `for chunk in response.iter_content()`. - - Arguments: - method: The HTTP method for the request. Typically one of `"GET"`, - `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. - url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, - or as str/bytes. - headers: The HTTP request headers. Either as a dictionary of str/bytes, - or as a list of two-tuples of str/bytes. - content: The content of the request body. Either as bytes, - or as a bytes iterator. - extensions: A dictionary of optional extra information included on the request. - Possible keys include `"timeout"`. - - Returns: - An instance of `httpcore.Response`. 
- """ - with ConnectionPool() as pool: - with pool.stream( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) as response: - yield response diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py deleted file mode 100644 index 88dc7f01..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .http_proxy import AsyncHTTPProxy -from .interfaces import AsyncConnectionInterface - -try: - from .http2 import AsyncHTTP2Connection -except ImportError: # pragma: nocover - - class AsyncHTTP2Connection: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use http2 support, but the `h2` package is not " - "installed. Use 'pip install httpcore[http2]'." - ) - - -try: - from .socks_proxy import AsyncSOCKSProxy -except ImportError: # pragma: nocover - - class AsyncSOCKSProxy: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use SOCKS support, but the `socksio` package is not " - "installed. Use 'pip install httpcore[socks]'." - ) - - -__all__ = [ - "AsyncHTTPConnection", - "AsyncConnectionPool", - "AsyncHTTPProxy", - "AsyncHTTP11Connection", - "AsyncHTTP2Connection", - "AsyncConnectionInterface", - "AsyncSOCKSProxy", -] diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index a77fcb9f..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc deleted file mode 100644 index dc7e6abf..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc deleted file mode 100644 index adf438ab..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc deleted file mode 100644 index b1eba746..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc deleted file mode 100644 index b8a74df8..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc deleted file mode 100644 index c2870717..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc deleted file mode 100644 index 2c5e2fb5..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc deleted file mode 100644 index 2f832439..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection.py deleted file mode 100644 index b42581df..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/connection.py +++ /dev/null @@ -1,222 +0,0 @@ -from __future__ import annotations - -import itertools -import logging -import ssl -import types -import typing - -from .._backends.auto import AutoBackend -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream -from .._exceptions import ConnectError, ConnectTimeout -from .._models import Origin, Request, Response -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. - - -logger = logging.getLogger("httpcore.connection") - - -def exponential_backoff(factor: float) -> typing.Iterator[float]: - """ - Generate a geometric sequence that has a ratio of 2 and starts with 0. 
- - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class AsyncHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: ssl.SSLContext | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: AsyncNetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connection: AsyncConnectionInterface | None = None - self._connect_failed: bool = False - self._request_lock = AsyncLock() - self._socket_options = socket_options - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - try: - async with self._request_lock: - if self._connection is None: - stream = await self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except BaseException as exc: - self._connect_failed = True - raise exc - - return await self._connection.handle_async_request(request) - - async def _connect(self, request: Request) -> AsyncNetworkStream: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = await self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme in (b"https", b"wss"): - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or 
self._origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - async with Trace("retry", logger, request, kwargs) as trace: - await self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - async def aclose(self) -> None: - if self._connection is not None: - async with Trace("close", logger, None, {}): - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
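The comment above notes that these context managers exist mainly for testing or for working with connection instances directly. As a minimal sketch of what that direct usage looks like (an editorial illustration, not part of the deleted file; it assumes `httpcore` is importable and uses `www.example.com` purely as a placeholder host), a single `AsyncHTTPConnection` is pinned to one origin and will only service requests for that origin:

```python
import asyncio

import httpcore


async def main() -> None:
    # A connection is tied to a single origin; requests to any other
    # origin raise RuntimeError (see can_handle_request above).
    origin = httpcore.Origin(b"https", b"www.example.com", 443)
    async with httpcore.AsyncHTTPConnection(origin=origin) as connection:
        response = await connection.request("GET", "https://www.example.com/")
        print(response.status, connection.info())


asyncio.run(main())
```

In the standard flow this wiring is handled by the connection pool, which creates connections per origin and reuses them while `is_available()` holds.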
- - async def __aenter__(self) -> AsyncHTTPConnection: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - await self.aclose() diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py deleted file mode 100644 index 96e973d0..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py +++ /dev/null @@ -1,420 +0,0 @@ -from __future__ import annotations - -import ssl -import sys -import types -import typing - -from .._backends.auto import AutoBackend -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Proxy, Request, Response -from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock -from .connection import AsyncHTTPConnection -from .interfaces import AsyncConnectionInterface, AsyncRequestInterface - - -class AsyncPoolRequest: - def __init__(self, request: Request) -> None: - self.request = request - self.connection: AsyncConnectionInterface | None = None - self._connection_acquired = AsyncEvent() - - def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: - self.connection = connection - self._connection_acquired.set() - - def clear_connection(self) -> None: - self.connection = None - self._connection_acquired = AsyncEvent() - - async def wait_for_connection( - self, timeout: float | None = None - ) -> AsyncConnectionInterface: - if self.connection is None: - await self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - def is_queued(self) -> bool: - return self.connection is None - - -class AsyncConnectionPool(AsyncRequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: ssl.SSLContext | None = None, - proxy: Proxy | None = None, - max_connections: int | None = 10, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: AsyncNetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. 
Can also be used to connect - using a particular address family. Using `local_address="0.0.0.0"` - will connect using an `AF_INET` address (IPv4), while using - `local_address="::"` will connect using an `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. - socket_options: Socket options that have to be included - in the TCP socket when the connection was established. - """ - self._ssl_context = ssl_context - self._proxy = proxy - self._max_connections = ( - sys.maxsize if max_connections is None else max_connections - ) - self._max_keepalive_connections = ( - sys.maxsize - if max_keepalive_connections is None - else max_keepalive_connections - ) - self._max_keepalive_connections = min( - self._max_connections, self._max_keepalive_connections - ) - - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._socket_options = socket_options - - # The mutable state on a connection pool is the queue of incoming requests, - # and the set of connections that are servicing those requests. - self._connections: list[AsyncConnectionInterface] = [] - self._requests: list[AsyncPoolRequest] = [] - - # We only mutate the state of the connection pool within an 'optional_thread_lock' - # context. This holds a threading lock unless we're running in async mode, - # in which case it is a no-op. - self._optional_thread_lock = AsyncThreadLock() - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - if self._proxy is not None: - if self._proxy.url.scheme in (b"socks5", b"socks5h"): - from .socks_proxy import AsyncSocks5Connection - - return AsyncSocks5Connection( - proxy_origin=self._proxy.url.origin, - proxy_auth=self._proxy.auth, - remote_origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - elif origin.scheme == b"http": - from .http_proxy import AsyncForwardHTTPConnection - - return AsyncForwardHTTPConnection( - proxy_origin=self._proxy.url.origin, - proxy_headers=self._proxy.headers, - proxy_ssl_context=self._proxy.ssl_context, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - ) - from .http_proxy import AsyncTunnelHTTPConnection - - return AsyncTunnelHTTPConnection( - proxy_origin=self._proxy.url.origin, - proxy_headers=self._proxy.headers, - proxy_ssl_context=self._proxy.ssl_context, - remote_origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - return AsyncHTTPConnection( - origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - retries=self._retries, - local_address=self._local_address, - uds=self._uds, - network_backend=self._network_backend, - socket_options=self._socket_options, - ) - - @property - def connections(self) -> list[AsyncConnectionInterface]: - """ - Return a list of the connections currently in the pool. 
- - For example: - - ```python - >>> pool.connections - [ - , - , - , - ] - ``` - """ - return list(self._connections) - - async def handle_async_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. - - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - - with self._optional_thread_lock: - # Add the incoming request to our request queue. - pool_request = AsyncPoolRequest(request) - self._requests.append(pool_request) - - try: - while True: - with self._optional_thread_lock: - # Assign incoming requests to available connections, - # closing or creating new connections as required. - closing = self._assign_requests_to_connections() - await self._close_connections(closing) - - # Wait until this request has an assigned connection. - connection = await pool_request.wait_for_connection(timeout=timeout) - - try: - # Send the request on the assigned connection. - response = await connection.handle_async_request( - pool_request.request - ) - except ConnectionNotAvailable: - # In some cases a connection may initially be available to - # handle a request, but then become unavailable. - # - # In this case we clear the connection and try again. - pool_request.clear_connection() - else: - break # pragma: nocover - - except BaseException as exc: - with self._optional_thread_lock: - # For any exception or cancellation we remove the request from - # the queue, and then re-assign requests to connections. - self._requests.remove(pool_request) - closing = self._assign_requests_to_connections() - - await self._close_connections(closing) - raise exc from None - - # Return the response. Note that in this case we still have to manage - # the point at which the response is closed. - assert isinstance(response.stream, typing.AsyncIterable) - return Response( - status=response.status, - headers=response.headers, - content=PoolByteStream( - stream=response.stream, pool_request=pool_request, pool=self - ), - extensions=response.extensions, - ) - - def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]: - """ - Manage the state of the connection pool, assigning incoming - requests to connections as available. - - Called whenever a new request is added or removed from the pool. - - Any closing connections are returned, allowing the I/O for closing - those connections to be handled seperately. - """ - closing_connections = [] - - # First we handle cleaning up any connections that are closed, - # have expired their keep-alive, or surplus idle connections. 
- for connection in list(self._connections): - if connection.is_closed(): - # log: "removing closed connection" - self._connections.remove(connection) - elif connection.has_expired(): - # log: "closing expired connection" - self._connections.remove(connection) - closing_connections.append(connection) - elif ( - connection.is_idle() - and len([connection.is_idle() for connection in self._connections]) - > self._max_keepalive_connections - ): - # log: "closing idle connection" - self._connections.remove(connection) - closing_connections.append(connection) - - # Assign queued requests to connections. - queued_requests = [request for request in self._requests if request.is_queued()] - for pool_request in queued_requests: - origin = pool_request.request.url.origin - available_connections = [ - connection - for connection in self._connections - if connection.can_handle_request(origin) and connection.is_available() - ] - idle_connections = [ - connection for connection in self._connections if connection.is_idle() - ] - - # There are three cases for how we may be able to handle the request: - # - # 1. There is an existing connection that can handle the request. - # 2. We can create a new connection to handle the request. - # 3. We can close an idle connection and then create a new connection - # to handle the request. - if available_connections: - # log: "reusing existing connection" - connection = available_connections[0] - pool_request.assign_to_connection(connection) - elif len(self._connections) < self._max_connections: - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - elif idle_connections: - # log: "closing idle connection" - connection = idle_connections[0] - self._connections.remove(connection) - closing_connections.append(connection) - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - - return closing_connections - - async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: - # Close connections which have been removed from the pool. - with AsyncShieldCancellation(): - for connection in closing: - await connection.aclose() - - async def aclose(self) -> None: - # Explicitly close the connection pool. - # Clears all existing requests and connections. 
- with self._optional_thread_lock: - closing_connections = list(self._connections) - self._connections = [] - await self._close_connections(closing_connections) - - async def __aenter__(self) -> AsyncConnectionPool: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - await self.aclose() - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - with self._optional_thread_lock: - request_is_queued = [request.is_queued() for request in self._requests] - connection_is_idle = [ - connection.is_idle() for connection in self._connections - ] - - num_active_requests = request_is_queued.count(False) - num_queued_requests = request_is_queued.count(True) - num_active_connections = connection_is_idle.count(False) - num_idle_connections = connection_is_idle.count(True) - - requests_info = ( - f"Requests: {num_active_requests} active, {num_queued_requests} queued" - ) - connection_info = ( - f"Connections: {num_active_connections} active, {num_idle_connections} idle" - ) - - return f"<{class_name} [{requests_info} | {connection_info}]>" - - -class PoolByteStream: - def __init__( - self, - stream: typing.AsyncIterable[bytes], - pool_request: AsyncPoolRequest, - pool: AsyncConnectionPool, - ) -> None: - self._stream = stream - self._pool_request = pool_request - self._pool = pool - self._closed = False - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - try: - async for part in self._stream: - yield part - except BaseException as exc: - await self.aclose() - raise exc from None - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - with AsyncShieldCancellation(): - if hasattr(self._stream, "aclose"): - await self._stream.aclose() - - with self._pool._optional_thread_lock: - self._pool._requests.remove(self._pool_request) - closing = self._pool._assign_requests_to_connections() - - await self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http11.py b/venv/lib/python3.12/site-packages/httpcore/_async/http11.py deleted file mode 100644 index e6d6d709..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/http11.py +++ /dev/null @@ -1,379 +0,0 @@ -from __future__ import annotations - -import enum -import logging -import ssl -import time -import types -import typing - -import h11 - -from .._backends.base import AsyncNetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, - WriteError, - map_exceptions, -) -from .._models import Origin, Request, Response -from .._synchronization import AsyncLock, AsyncShieldCancellation -from .._trace import Trace -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.http11") - - -# A subset of `h11.Event` types supported by `_send_event` -H11SendEvent = typing.Union[ - h11.Request, - h11.Data, - h11.EndOfMessage, -] - - -class HTTPConnectionState(enum.IntEnum): - NEW = 0 - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class AsyncHTTP11Connection(AsyncConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - - def __init__( - self, - origin: Origin, - stream: AsyncNetworkStream, - keepalive_expiry: float | None = None, - ) -> None: - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: float | None = keepalive_expiry - self._expire_at: float | None = None - self._state = 
HTTPConnectionState.NEW - self._state_lock = AsyncLock() - self._request_count = 0 - self._h11_state = h11.Connection( - our_role=h11.CLIENT, - max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, - ) - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - async with self._state_lock: - if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): - self._request_count += 1 - self._state = HTTPConnectionState.ACTIVE - self._expire_at = None - else: - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request} - try: - async with Trace( - "send_request_headers", logger, request, kwargs - ) as trace: - await self._send_request_headers(**kwargs) - async with Trace("send_request_body", logger, request, kwargs) as trace: - await self._send_request_body(**kwargs) - except WriteError: - # If we get a write error while we're writing the request, - # then we supress this error and move on to attempting to - # read the response. Servers can sometimes close the request - # pre-emptively and then respond with a well formed HTTP - # error response. - pass - - async with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - ( - http_version, - status, - reason_phrase, - headers, - trailing_data, - ) = await self._receive_response_headers(**kwargs) - trace.return_value = ( - http_version, - status, - reason_phrase, - headers, - ) - - network_stream = self._network_stream - - # CONNECT or Upgrade request - if (status == 101) or ( - (request.method == b"CONNECT") and (200 <= status < 300) - ): - network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data) - - return Response( - status=status, - headers=headers, - content=HTTP11ConnectionByteStream(self, request), - extensions={ - "http_version": http_version, - "reason_phrase": reason_phrase, - "network_stream": network_stream, - }, - ) - except BaseException as exc: - with AsyncShieldCancellation(): - async with Trace("response_closed", logger, request) as trace: - await self._response_closed() - raise exc - - # Sending the request... - - async def _send_request_headers(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request( - method=request.method, - target=request.url.target, - headers=request.headers, - ) - await self._send_event(event, timeout=timeout) - - async def _send_request_body(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - assert isinstance(request.stream, typing.AsyncIterable) - async for chunk in request.stream: - event = h11.Data(data=chunk) - await self._send_event(event, timeout=timeout) - - await self._send_event(h11.EndOfMessage(), timeout=timeout) - - async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: - bytes_to_send = self._h11_state.send(event) - if bytes_to_send is not None: - await self._network_stream.write(bytes_to_send, timeout=timeout) - - # Receiving the response... 
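The sending half above is a thin wrapper around h11's event API: build an event, serialise it with `send()`, write the bytes. Before the receiving half below, here is a standalone sketch of that same pattern over a plain blocking socket (an editorial illustration under stated assumptions: the `h11` package is installed, and `example.com` is a placeholder host standing in for httpcore's network-stream abstraction):

```python
import socket

import h11

conn = h11.Connection(our_role=h11.CLIENT)
sock = socket.create_connection(("example.com", 80))

# Each h11 event is serialised to bytes with .send() and written to the wire,
# mirroring _send_event() above.
request = h11.Request(
    method="GET",
    target="/",
    headers=[("Host", "example.com"), ("Connection", "close")],
)
sock.sendall(conn.send(request))
sock.sendall(conn.send(h11.EndOfMessage()))

# Reading mirrors the receive loop below: feed raw bytes in with
# receive_data(), pull structured events out with next_event().
while True:
    event = conn.next_event()
    if event is h11.NEED_DATA:
        conn.receive_data(sock.recv(64 * 1024))
    elif isinstance(event, h11.Response):
        print("status:", event.status_code)
    elif isinstance(event, h11.Data):
        print("body bytes:", len(event.data))
    elif isinstance(event, h11.EndOfMessage):
        break

sock.close()
```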
- - async def _receive_response_headers( - self, request: Request - ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = await self._receive_event(timeout=timeout) - if isinstance(event, h11.Response): - break - if ( - isinstance(event, h11.InformationalResponse) - and event.status_code == 101 - ): - break - - http_version = b"HTTP/" + event.http_version - - # h11 version 0.11+ supports a `raw_items` interface to get the - # raw header casing, rather than the enforced lowercase headers. - headers = event.headers.raw_items() - - trailing_data, _ = self._h11_state.trailing_data - - return http_version, event.status_code, event.reason, headers, trailing_data - - async def _receive_response_body( - self, request: Request - ) -> typing.AsyncIterator[bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = await self._receive_event(timeout=timeout) - if isinstance(event, h11.Data): - yield bytes(event.data) - elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - break - - async def _receive_event( - self, timeout: float | None = None - ) -> h11.Event | type[h11.PAUSED]: - while True: - with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): - event = self._h11_state.next_event() - - if event is h11.NEED_DATA: - data = await self._network_stream.read( - self.READ_NUM_BYTES, timeout=timeout - ) - - # If we feed this case through h11 we'll raise an exception like: - # - # httpcore.RemoteProtocolError: can't handle event type - # ConnectionClosed when role=SERVER and state=SEND_RESPONSE - # - # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle this case distinctly and treat - # it as a ConnectError. - if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: - msg = "Server disconnected without sending a response." - raise RemoteProtocolError(msg) - - self._h11_state.receive_data(data) - else: - # mypy fails to narrow the type in the above if statement above - return event # type: ignore[return-value] - - async def _response_closed(self) -> None: - async with self._state_lock: - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._state = HTTPConnectionState.IDLE - self._h11_state.start_next_cycle() - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - else: - await self.aclose() - - # Once the connection is no longer required... - - async def aclose(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._state = HTTPConnectionState.CLOSED - await self._network_stream.aclose() - - # The AsyncConnectionInterface methods provide information about the state of - # the connection, allowing for a connection pooling implementation to - # determine when to reuse and when to close the connection... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - # Note that HTTP/1.1 connections in the "NEW" state are not treated as - # being "available". The control flow which created the connection will - # be able to send an outgoing request, but the connection will not be - # acquired from the connection pool for any other request. 
- return self._state == HTTPConnectionState.IDLE - - def has_expired(self) -> bool: - now = time.monotonic() - keepalive_expired = self._expire_at is not None and now > self._expire_at - - # If the HTTP connection is idle but the socket is readable, then the - # only valid state is that the socket is about to return b"", indicating - # a server-initiated disconnect. - server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. - - async def __aenter__(self) -> AsyncHTTP11Connection: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - await self.aclose() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - kwargs = {"request": self._request} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. 
- with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - async with Trace("response_closed", logger, self._request): - await self._connection._response_closed() - - -class AsyncHTTP11UpgradeStream(AsyncNetworkStream): - def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: - self._stream = stream - self._leading_data = leading_data - - async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - if self._leading_data: - buffer = self._leading_data[:max_bytes] - self._leading_data = self._leading_data[max_bytes:] - return buffer - else: - return await self._stream.read(max_bytes, timeout) - - async def write(self, buffer: bytes, timeout: float | None = None) -> None: - await self._stream.write(buffer, timeout) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> AsyncNetworkStream: - return await self._stream.start_tls(ssl_context, server_hostname, timeout) - - def get_extra_info(self, info: str) -> typing.Any: - return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http2.py b/venv/lib/python3.12/site-packages/httpcore/_async/http2.py deleted file mode 100644 index dbd0beeb..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/http2.py +++ /dev/null @@ -1,592 +0,0 @@ -from __future__ import annotations - -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import AsyncNetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation -from .._trace import Trace -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class AsyncHTTP2Connection(AsyncConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: AsyncNetworkStream, - keepalive_expiry: float | None = None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: float | None = keepalive_expiry - self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - self._expire_at: float | None = None - self._request_count = 0 - self._init_lock = AsyncLock() - self._state_lock = AsyncLock() - self._read_lock = AsyncLock() - self._write_lock = AsyncLock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: dict[ - int, - list[ - h2.events.ResponseReceived - | h2.events.DataReceived - | h2.events.StreamEnded - | h2.events.StreamReset, - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. 
- self._connection_terminated: h2.events.ConnectionTerminated | None = None - - self._read_exception: Exception | None = None - self._write_exception: Exception | None = None - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - async with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - async with self._init_lock: - if not self._sent_connection_init: - try: - sci_kwargs = {"request": request} - async with Trace( - "send_connection_init", logger, request, sci_kwargs - ): - await self._send_connection_init(**sci_kwargs) - except BaseException as exc: - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - await self._max_streams_semaphore.acquire() - - await self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - async with Trace("send_request_headers", logger, request, kwargs): - await self._send_request_headers(request=request, stream_id=stream_id) - async with Trace("send_request_body", logger, request, kwargs): - await self._send_request_body(request=request, stream_id=stream_id) - async with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = await self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with AsyncShieldCancellation(): - kwargs = {"stream_id": stream_id} - async with Trace("response_closed", logger, request, kwargs): - await self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. 
- if self._connection_terminated: # pragma: nocover - raise RemoteProtocolError(self._connection_terminated) - # If h2 raises a protocol error in some other state then we - # must somehow have made a protocol violation. - raise LocalProtocolError(exc) # pragma: nocover - - raise exc - - async def _send_connection_init(self, request: Request) -> None: - """ - The HTTP/2 connection requires some initial setup before we can start - using individual request/response streams on it. - """ - # Need to set these manually here instead of manipulating via - # __setitem__() otherwise the H2Connection will emit SettingsUpdate - # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = h2.settings.Settings( - client=True, - initial_values={ - # Disable PUSH_PROMISE frames from the server since we don't do anything - # with them for now. Maybe when we support caching? - h2.settings.SettingCodes.ENABLE_PUSH: 0, - # These two are taken from h2 for safe defaults - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, - h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, - }, - ) - - # Some websites (*cough* Yahoo *cough*) balk at this setting being - # present in the initial handshake since it's not defined in the original - # RFC despite the RFC mandating ignoring settings you don't know about. - del self._h2_state.local_settings[ - h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL - ] - - self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2**24) - await self._write_outgoing_data(request) - - # Sending the request... - - async def _send_request_headers(self, request: Request, stream_id: int) -> None: - """ - Send the request headers to a given stream ID. - """ - end_stream = not has_body_headers(request) - - # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. - # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require - # HTTP/1.1 style headers, and map them appropriately if we end up on - # an HTTP/2 connection. - authority = [v for k, v in request.headers if k.lower() == b"host"][0] - - headers = [ - (b":method", request.method), - (b":authority", authority), - (b":scheme", request.url.scheme), - (b":path", request.url.target), - ] + [ - (k.lower(), v) - for k, v in request.headers - if k.lower() - not in ( - b"host", - b"transfer-encoding", - ) - ] - - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) - await self._write_outgoing_data(request) - - async def _send_request_body(self, request: Request, stream_id: int) -> None: - """ - Iterate over the request body sending it to a given stream ID. - """ - if not has_body_headers(request): - return - - assert isinstance(request.stream, typing.AsyncIterable) - async for data in request.stream: - await self._send_stream_data(request, stream_id, data) - await self._send_end_stream(request, stream_id) - - async def _send_stream_data( - self, request: Request, stream_id: int, data: bytes - ) -> None: - """ - Send a single chunk of data in one or more data frames. 
- """ - while data: - max_flow = await self._wait_for_outgoing_flow(request, stream_id) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - self._h2_state.send_data(stream_id, chunk) - await self._write_outgoing_data(request) - - async def _send_end_stream(self, request: Request, stream_id: int) -> None: - """ - Send an empty data frame on on a given stream ID with the END_STREAM flag set. - """ - self._h2_state.end_stream(stream_id) - await self._write_outgoing_data(request) - - # Receiving the response... - - async def _receive_response( - self, request: Request, stream_id: int - ) -> tuple[int, list[tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. - """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - assert event.headers is not None - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - async def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.AsyncIterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - assert event.flow_controlled_length is not None - assert event.data is not None - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - await self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - async def _receive_stream_event( - self, request: Request, stream_id: int - ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - await self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - async def _receive_events( - self, request: Request, stream_id: int | None = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID. - """ - async with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. 
- if stream_id is None or not self._events.get(stream_id): - events = await self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - async with Trace( - "receive_remote_settings", logger, request - ) as trace: - await self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - await self._write_outgoing_data(request) - - async def _receive_remote_settings_change( - self, event: h2.events.RemoteSettingsChanged - ) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - await self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - await self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - async def _response_closed(self, stream_id: int) -> None: - await self._max_streams_semaphore.release() - del self._events[stream_id] - async with self._state_lock: - if self._connection_terminated and not self._events: - await self.aclose() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - await self.aclose() - - async def aclose(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - await self._network_stream.aclose() - - # Wrappers around network read/write operations... - - async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. 
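- # (For example: if stream 1 hits a read timeout, streams 3 and 5 blocked
- # in `_receive_events` re-raise the same saved exception immediately,
- # rather than each waiting out its own read timeout.)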
- self._read_exception = exc - self._connection_error = True - raise exc - - events: list[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - async def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - async with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - await self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - await self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
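- # A minimal sketch of such direct usage (illustrative, assuming
- # `httpcore.AsyncMockBackend` from the mock backend shown later in this
- # diff, with canned HTTP/2 response frames built using the `hpack` and
- # `hyperframe` packages, both dependencies of `h2`):
- #
- #     import asyncio
- #     import hpack
- #     import hyperframe.frame
- #     import httpcore
- #
- #     frames = [
- #         hyperframe.frame.SettingsFrame().serialize(),
- #         hyperframe.frame.HeadersFrame(
- #             stream_id=1,
- #             data=hpack.Encoder().encode([(b":status", b"200")]),
- #             flags=["END_HEADERS"],
- #         ).serialize(),
- #         hyperframe.frame.DataFrame(
- #             stream_id=1, data=b"Hello, world!", flags=["END_STREAM"]
- #         ).serialize(),
- #     ]
- #
- #     async def main() -> None:
- #         origin = httpcore.Origin(b"https", b"example.com", 443)
- #         backend = httpcore.AsyncMockBackend(frames, http2=True)
- #         stream = await backend.connect_tcp("example.com", 443)
- #         async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
- #             response = await conn.request("GET", "https://example.com/")
- #             print(response.status, response.content)
- #
- #     asyncio.run(main())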
- - async def __aenter__(self) -> AsyncHTTP2Connection: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - await self.aclose() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: AsyncHTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - async with Trace("response_closed", logger, self._request, kwargs): - await self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py deleted file mode 100644 index cc9d9206..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import base64 -import logging -import ssl -import typing - -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -ByteOrStr = typing.Union[bytes, str] -HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] -HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, - override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, -) -> list[tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover - """ - A connection pool that sends requests via an HTTP proxy. 
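-
- Usage sketch (illustrative; the async analogue of the documented
- `httpcore.HTTPProxy` usage):
-
- proxy = httpcore.AsyncHTTPProxy(proxy_url="http://127.0.0.1:8080/")
- response = await proxy.request("GET", "https://www.example.com/")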
- """ - - def __init__( - self, - proxy_url: URL | bytes | str, - proxy_auth: tuple[bytes | str, bytes | str] | None = None, - proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, - ssl_context: ssl.SSLContext | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - max_connections: int | None = 10, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: AsyncNetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - userpass = username + b":" + password - authorization = b"Basic " + base64.b64encode(userpass) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - if origin.scheme == b"http": - return AsyncForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return AsyncTunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncForwardHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, - keepalive_expiry: float | None = None, - network_backend: AsyncNetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - ) -> None: - self._connection = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - async def handle_async_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return await self._connection.handle_async_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return 
self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - -class AsyncTunnelHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: ssl.SSLContext | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - network_backend: AsyncNetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - self._connection: AsyncConnectionInterface = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = AsyncLock() - self._connected = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = await self._connection.handle_async_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - await self._connection.aclose() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - 
origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py deleted file mode 100644 index 361583be..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py +++ /dev/null @@ -1,137 +0,0 @@ -from __future__ import annotations - -import contextlib -import typing - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class AsyncRequestInterface: - async def request( - self, - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.AsyncIterator[bytes] | None = None, - extensions: Extensions | None = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = await self.handle_async_request(request) - try: - await response.aread() - finally: - await response.aclose() - return response - - @contextlib.asynccontextmanager - async def stream( - self, - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.AsyncIterator[bytes] | None = None, - extensions: Extensions | None = None, - ) -> typing.AsyncIterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. 
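- # Usage sketch (illustrative; `pool` is any implementation of this
- # interface, such as an `AsyncConnectionPool`):
- #
- #     response = await pool.request(b"GET", "https://www.example.com/")
- #
- #     async with pool.stream(b"GET", "https://www.example.com/") as response:
- #         async for chunk in response.aiter_stream():
- #             ...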
- headers = include_request_headers(headers, url=url, content=content)
-
- request = Request(
- method=method,
- url=url,
- headers=headers,
- content=content,
- extensions=extensions,
- )
- response = await self.handle_async_request(request)
- try:
- yield response
- finally:
- await response.aclose()
-
- async def handle_async_request(self, request: Request) -> Response:
- raise NotImplementedError() # pragma: nocover
-
-
- class AsyncConnectionInterface(AsyncRequestInterface):
- async def aclose(self) -> None:
- raise NotImplementedError() # pragma: nocover
-
- def info(self) -> str:
- raise NotImplementedError() # pragma: nocover
-
- def can_handle_request(self, origin: Origin) -> bool:
- raise NotImplementedError() # pragma: nocover
-
- def is_available(self) -> bool:
- """
- Return `True` if the connection is currently able to accept an
- outgoing request.
-
- An HTTP/1.1 connection will only be available if it is currently idle.
-
- An HTTP/2 connection will be available so long as the stream ID space is
- not yet exhausted, and the connection is not in an error state.
-
- While the connection is being established we may not yet know if it is going
- to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
- treated as being available, but might ultimately raise `NewConnectionRequired`
- exceptions if multiple requests are attempted over a connection
- that ends up being established as HTTP/1.1.
- """
- raise NotImplementedError() # pragma: nocover
-
- def has_expired(self) -> bool:
- """
- Return `True` if the connection is in a state where it should be closed.
-
- This either means that the connection is idle and it has passed the
- expiry time on its keep-alive, or that the server has sent an EOF.
- """
- raise NotImplementedError() # pragma: nocover
-
- def is_idle(self) -> bool:
- """
- Return `True` if the connection is currently idle.
- """
- raise NotImplementedError() # pragma: nocover
-
- def is_closed(self) -> bool:
- """
- Return `True` if the connection has been closed.
-
- Used when a response is closed to determine if the connection may be
- returned to the connection pool or not.
- """ - raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py deleted file mode 100644 index b363f55a..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py +++ /dev/null @@ -1,341 +0,0 @@ -from __future__ import annotations - -import logging -import ssl - -import socksio - -from .._backends.auto import AutoBackend -from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -async def _init_socks5_connection( - stream: AsyncNetworkStream, - *, - host: bytes, - port: int, - auth: tuple[bytes, bytes] | None = None, -) -> None: - conn = socksio.socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - await stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = await stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socksio.socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}." 
- )
-
- if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
- # Username/password request
- assert auth is not None
- username, password = auth
- conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
- outgoing_bytes = conn.data_to_send()
- await stream.write(outgoing_bytes)
-
- # Username/password response
- incoming_bytes = await stream.read(max_bytes=4096)
- response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
- if not response.success:
- raise ProxyError("Invalid username/password")
-
- # Connect request
- conn.send(
- socksio.socks5.SOCKS5CommandRequest.from_address(
- socksio.socks5.SOCKS5Command.CONNECT, (host, port)
- )
- )
- outgoing_bytes = conn.data_to_send()
- await stream.write(outgoing_bytes)
-
- # Connect response
- incoming_bytes = await stream.read(max_bytes=4096)
- response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socksio.socks5.SOCKS5Reply)
- if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
- reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
- raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
-
- class AsyncSOCKSProxy(AsyncConnectionPool): # pragma: nocover
- """
- A connection pool that sends requests via a SOCKS proxy.
- """
-
- def __init__(
- self,
- proxy_url: URL | bytes | str,
- proxy_auth: tuple[bytes | str, bytes | str] | None = None,
- ssl_context: ssl.SSLContext | None = None,
- max_connections: int | None = 10,
- max_keepalive_connections: int | None = None,
- keepalive_expiry: float | None = None,
- http1: bool = True,
- http2: bool = False,
- retries: int = 0,
- network_backend: AsyncNetworkBackend | None = None,
- ) -> None:
- """
- A connection pool for making HTTP requests.
-
- Parameters:
- proxy_url: The URL to use when connecting to the proxy server.
- For example `"http://127.0.0.1:8080/"`.
- proxy_auth: Any proxy authentication as a two-tuple of
- (username, password). May be either bytes or ascii-only str.
- ssl_context: An SSL context to use for verifying connections.
- If not specified, the default `httpcore.default_ssl_context()`
- will be used.
- max_connections: The maximum number of concurrent HTTP connections that
- the pool should allow. Any attempt to send a request on a pool that
- would exceed this amount will block until a connection is available.
- max_keepalive_connections: The maximum number of idle HTTP connections
- that will be maintained in the pool.
- keepalive_expiry: The duration in seconds that an idle HTTP connection
- may be maintained for before being expired from the pool.
- http1: A boolean indicating if HTTP/1.1 requests should be supported
- by the connection pool. Defaults to True.
- http2: A boolean indicating if HTTP/2 requests should be supported by
- the connection pool. Defaults to False.
- retries: The maximum number of retries when trying to establish
- a connection.
- network_backend: A backend instance to use for handling network I/O.
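-
- Usage sketch (illustrative; requires the `socksio` package):
-
- proxy = httpcore.AsyncSOCKSProxy(proxy_url="socks5://127.0.0.1:1080/")
- response = await proxy.request("GET", "https://www.example.com/")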
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: tuple[bytes, bytes] | None = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - return AsyncSocks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncSocks5Connection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: tuple[bytes, bytes] | None = None, - ssl_context: ssl.SSLContext | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - network_backend: AsyncNetworkBackend | None = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connect_lock = AsyncLock() - self._connection: AsyncConnectionInterface | None = None - self._connect_failed = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - async with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - async with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - await _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = 
stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or ( - self._http2 and not self._http1 - ): # pragma: nocover - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): # pragma: nocover - raise ConnectionNotAvailable() - - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - if self._connection is not None: - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: # pragma: nocover - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._remote_origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: # pragma: nocover - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 050fe128..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc deleted file mode 100644 index 637b1aa7..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc deleted file mode 100644 index 6ed91030..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc and /dev/null differ diff --git 
a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 782f3031..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc deleted file mode 100644 index 77387494..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc deleted file mode 100644 index 2d7ce7db..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc deleted file mode 100644 index 4cba2d75..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py deleted file mode 100644 index a140095e..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py +++ /dev/null @@ -1,146 +0,0 @@ -from __future__ import annotations - -import ssl -import typing - -import anyio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._utils import is_socket_readable -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AnyIOStream(AsyncNetworkStream): - def __init__(self, stream: anyio.abc.ByteStream) -> None: - self._stream = stream - - async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - exc_map = { - TimeoutError: ReadTimeout, - anyio.BrokenResourceError: ReadError, - anyio.ClosedResourceError: ReadError, - anyio.EndOfStream: ReadError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - try: - return await self._stream.receive(max_bytes=max_bytes) - except anyio.EndOfStream: # pragma: nocover - return b"" - - async def write(self, buffer: bytes, timeout: float | None = None) -> None: - if not buffer: - return - - exc_map = { - TimeoutError: WriteTimeout, - anyio.BrokenResourceError: WriteError, - anyio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - await self._stream.send(item=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> AsyncNetworkStream: - exc_map = { - TimeoutError: ConnectTimeout, - anyio.BrokenResourceError: ConnectError, - anyio.EndOfStream: ConnectError, - ssl.SSLError: ConnectError, - } - with map_exceptions(exc_map): - try: - with anyio.fail_after(timeout): - ssl_stream = await anyio.streams.tls.TLSStream.wrap( - self._stream, - ssl_context=ssl_context, - hostname=server_hostname, - 
standard_compatible=False, - server_side=False, - ) - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return AnyIOStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) - if info == "client_addr": - return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) - if info == "server_addr": - return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) - if info == "socket": - return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - if info == "is_readable": - sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - return is_socket_readable(sock) - return None - - -class AnyIOBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_tcp( - remote_host=host, - remote_port=port, - local_host=local_address, - ) - # By default TCP sockets opened in `asyncio` include TCP_NODELAY. - for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_unix(path) - for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def sleep(self, seconds: float) -> None: - await anyio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py deleted file mode 100644 index 49f0e698..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -import typing - -from .._synchronization import current_async_library -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AutoBackend(AsyncNetworkBackend): - async def _init_backend(self) -> None: - if not (hasattr(self, "_backend")): - backend = current_async_library() - if backend == "trio": - from .trio import TrioBackend - - self._backend: AsyncNetworkBackend = TrioBackend() - else: - from .anyio import AnyIOBackend - - self._backend = AnyIOBackend() - - async def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: - await self._init_backend() - return await self._backend.connect_tcp( - host, - port, - timeout=timeout, - local_address=local_address, 
- socket_options=socket_options, - ) - - async def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: # pragma: nocover - await self._init_backend() - return await self._backend.connect_unix_socket( - path, timeout=timeout, socket_options=socket_options - ) - - async def sleep(self, seconds: float) -> None: # pragma: nocover - await self._init_backend() - return await self._backend.sleep(seconds) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/base.py b/venv/lib/python3.12/site-packages/httpcore/_backends/base.py deleted file mode 100644 index cf55c8b1..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_backends/base.py +++ /dev/null @@ -1,101 +0,0 @@ -from __future__ import annotations - -import ssl -import time -import typing - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - - -class NetworkStream: - def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - raise NotImplementedError() # pragma: nocover - - def write(self, buffer: bytes, timeout: float | None = None) -> None: - raise NotImplementedError() # pragma: nocover - - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class NetworkBackend: - def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def sleep(self, seconds: float) -> None: - time.sleep(seconds) # pragma: nocover - - -class AsyncNetworkStream: - async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - raise NotImplementedError() # pragma: nocover - - async def write(self, buffer: bytes, timeout: float | None = None) -> None: - raise NotImplementedError() # pragma: nocover - - async def aclose(self) -> None: - raise NotImplementedError() # pragma: nocover - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class AsyncNetworkBackend: - async def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def sleep(self, seconds: float) -> None: - raise NotImplementedError() # pragma: nocover diff --git 
a/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py
deleted file mode 100644
index 9b6edca0..00000000
--- a/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py
+++ /dev/null
@@ -1,143 +0,0 @@
- from __future__ import annotations
-
- import ssl
- import typing
-
- from .._exceptions import ReadError
- from .base import (
- SOCKET_OPTION,
- AsyncNetworkBackend,
- AsyncNetworkStream,
- NetworkBackend,
- NetworkStream,
- )
-
- class MockSSLObject:
- def __init__(self, http2: bool):
- self._http2 = http2
-
- def selected_alpn_protocol(self) -> str:
- return "h2" if self._http2 else "http/1.1"
-
- class MockStream(NetworkStream):
- def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
- self._buffer = buffer
- self._http2 = http2
- self._closed = False
-
- def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
- if self._closed:
- raise ReadError("Connection closed")
- if not self._buffer:
- return b""
- return self._buffer.pop(0)
-
- def write(self, buffer: bytes, timeout: float | None = None) -> None:
- pass
-
- def close(self) -> None:
- self._closed = True
-
- def start_tls(
- self,
- ssl_context: ssl.SSLContext,
- server_hostname: str | None = None,
- timeout: float | None = None,
- ) -> NetworkStream:
- return self
-
- def get_extra_info(self, info: str) -> typing.Any:
- return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
-
- def __repr__(self) -> str:
- return "<httpcore.MockStream>"
-
- class MockBackend(NetworkBackend):
- def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
- self._buffer = buffer
- self._http2 = http2
-
- def connect_tcp(
- self,
- host: str,
- port: int,
- timeout: float | None = None,
- local_address: str | None = None,
- socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
- ) -> NetworkStream:
- return MockStream(list(self._buffer), http2=self._http2)
-
- def connect_unix_socket(
- self,
- path: str,
- timeout: float | None = None,
- socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
- ) -> NetworkStream:
- return MockStream(list(self._buffer), http2=self._http2)
-
- def sleep(self, seconds: float) -> None:
- pass
-
- class AsyncMockStream(AsyncNetworkStream):
- def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
- self._buffer = buffer
- self._http2 = http2
- self._closed = False
-
- async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
- if self._closed:
- raise ReadError("Connection closed")
- if not self._buffer:
- return b""
- return self._buffer.pop(0)
-
- async def write(self, buffer: bytes, timeout: float | None = None) -> None:
- pass
-
- async def aclose(self) -> None:
- self._closed = True
-
- async def start_tls(
- self,
- ssl_context: ssl.SSLContext,
- server_hostname: str | None = None,
- timeout: float | None = None,
- ) -> AsyncNetworkStream:
- return self
-
- def get_extra_info(self, info: str) -> typing.Any:
- return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
-
- def __repr__(self) -> str:
- return "<httpcore.AsyncMockStream>"
-
- class AsyncMockBackend(AsyncNetworkBackend):
- def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
- self._buffer = buffer
- self._http2 = http2
-
- async def connect_tcp(
- self,
- host: str,
- port: int,
- timeout: float | None = None,
- local_address: str | None = None,
- socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
- ) -> AsyncNetworkStream:
- return AsyncMockStream(list(self._buffer),
http2=self._http2) - - async def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: - return AsyncMockStream(list(self._buffer), http2=self._http2) - - async def sleep(self, seconds: float) -> None: - pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py deleted file mode 100644 index 4018a09c..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py +++ /dev/null @@ -1,241 +0,0 @@ -from __future__ import annotations - -import functools -import socket -import ssl -import sys -import typing - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ExceptionMapping, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._utils import is_socket_readable -from .base import SOCKET_OPTION, NetworkBackend, NetworkStream - - -class TLSinTLSStream(NetworkStream): # pragma: no cover - """ - Because the standard `SSLContext.wrap_socket` method does - not work for `SSLSocket` objects, we need this class - to implement TLS stream using an underlying `SSLObject` - instance in order to support TLS on top of TLS. - """ - - # Defined in RFC 8449 - TLS_RECORD_SIZE = 16384 - - def __init__( - self, - sock: socket.socket, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ): - self._sock = sock - self._incoming = ssl.MemoryBIO() - self._outgoing = ssl.MemoryBIO() - - self.ssl_obj = ssl_context.wrap_bio( - incoming=self._incoming, - outgoing=self._outgoing, - server_hostname=server_hostname, - ) - - self._sock.settimeout(timeout) - self._perform_io(self.ssl_obj.do_handshake) - - def _perform_io( - self, - func: typing.Callable[..., typing.Any], - ) -> typing.Any: - ret = None - - while True: - errno = None - try: - ret = func() - except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: - errno = e.errno - - self._sock.sendall(self._outgoing.read()) - - if errno == ssl.SSL_ERROR_WANT_READ: - buf = self._sock.recv(self.TLS_RECORD_SIZE) - - if buf: - self._incoming.write(buf) - else: - self._incoming.write_eof() - if errno is None: - return ret - - def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return typing.cast( - bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes)) - ) - - def write(self, buffer: bytes, timeout: float | None = None) -> None: - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - while buffer: - nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer)) - buffer = buffer[nsent:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> NetworkStream: - raise NotImplementedError() - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self.ssl_obj - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncStream(NetworkStream): - 
def __init__(self, sock: socket.socket) -> None: - self._sock = sock - - def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return self._sock.recv(max_bytes) - - def write(self, buffer: bytes, timeout: float | None = None) -> None: - if not buffer: - return - - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - while buffer: - self._sock.settimeout(timeout) - n = self._sock.send(buffer) - buffer = buffer[n:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> NetworkStream: - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - try: - if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover - # If the underlying socket has already been upgraded - # to the TLS layer (i.e. is an instance of SSLSocket), - # we need some additional smarts to support TLS-in-TLS. - return TLSinTLSStream( - self._sock, ssl_context, server_hostname, timeout - ) - else: - self._sock.settimeout(timeout) - sock = ssl_context.wrap_socket( - self._sock, server_hostname=server_hostname - ) - except Exception as exc: # pragma: nocover - self.close() - raise exc - return SyncStream(sock) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): - return self._sock._sslobj # type: ignore - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncBackend(NetworkBackend): - def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> NetworkStream: - # Note that we automatically include `TCP_NODELAY` - # in addition to any other custom socket options. - if socket_options is None: - socket_options = [] # pragma: no cover - address = (host, port) - source_address = None if local_address is None else (local_address, 0) - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - - with map_exceptions(exc_map): - sock = socket.create_connection( - address, - timeout, - source_address=source_address, - ) - for option in socket_options: - sock.setsockopt(*option) # pragma: no cover - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SyncStream(sock) - - def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> NetworkStream: # pragma: nocover - if sys.platform == "win32": - raise RuntimeError( - "Attempted to connect to a UNIX socket on a Windows system." 
- ) - if socket_options is None: - socket_options = [] - - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - for option in socket_options: - sock.setsockopt(*option) - sock.settimeout(timeout) - sock.connect(path) - return SyncStream(sock) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py deleted file mode 100644 index 6f53f5f2..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py +++ /dev/null @@ -1,159 +0,0 @@ -from __future__ import annotations - -import ssl -import typing - -import trio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ExceptionMapping, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class TrioStream(AsyncNetworkStream): - def __init__(self, stream: trio.abc.Stream) -> None: - self._stream = stream - - async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ReadTimeout, - trio.BrokenResourceError: ReadError, - trio.ClosedResourceError: ReadError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - data: bytes = await self._stream.receive_some(max_bytes=max_bytes) - return data - - async def write(self, buffer: bytes, timeout: float | None = None) -> None: - if not buffer: - return - - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: WriteTimeout, - trio.BrokenResourceError: WriteError, - trio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - await self._stream.send_all(data=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> AsyncNetworkStream: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - } - ssl_stream = trio.SSLStream( - self._stream, - ssl_context=ssl_context, - server_hostname=server_hostname, - https_compatible=True, - server_side=False, - ) - with map_exceptions(exc_map): - try: - with trio.fail_after(timeout_or_inf): - await ssl_stream.do_handshake() - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return TrioStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): - # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
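`TrioStream` above converts httpcore's `timeout=None` convention into trio's cancel-scope model by substituting an infinite deadline. A small sketch of the same pattern, with the stdlib `TimeoutError` standing in for httpcore's `ReadTimeout` (assumes `pip install trio`):

```python
# Sketch of the timeout convention used by TrioStream above: httpcore's
# `timeout=None` becomes an infinite trio deadline.
import trio

async def read_with_timeout(stream: trio.abc.ReceiveStream, timeout: float | None) -> bytes:
    timeout_or_inf = float("inf") if timeout is None else timeout
    try:
        with trio.fail_after(timeout_or_inf):
            return await stream.receive_some(65536)
    except trio.TooSlowError as exc:
        raise TimeoutError("read timed out") from exc
```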
- # Tracked at https://github.com/python-trio/trio/issues/542 - return self._stream._ssl_object # type: ignore[attr-defined] - if info == "client_addr": - return self._get_socket_stream().socket.getsockname() - if info == "server_addr": - return self._get_socket_stream().socket.getpeername() - if info == "socket": - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream.socket - if info == "is_readable": - socket = self.get_extra_info("socket") - return socket.is_readable() - return None - - def _get_socket_stream(self) -> trio.SocketStream: - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream - - -class TrioBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: float | None = None, - local_address: str | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: - # By default for TCP sockets, trio enables TCP_NODELAY. - # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream - if socket_options is None: - socket_options = [] # pragma: no cover - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_tcp_stream( - host=host, port=port, local_address=local_address - ) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: float | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_unix_socket(path) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def sleep(self, seconds: float) -> None: - await trio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_exceptions.py b/venv/lib/python3.12/site-packages/httpcore/_exceptions.py deleted file mode 100644 index bc28d44f..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_exceptions.py +++ /dev/null @@ -1,81 +0,0 @@ -import contextlib -import typing - -ExceptionMapping = typing.Mapping[typing.Type[Exception], typing.Type[Exception]] - - -@contextlib.contextmanager -def map_exceptions(map: ExceptionMapping) -> typing.Iterator[None]: - try: - yield - except Exception as exc: # noqa: PIE786 - for from_exc, to_exc in map.items(): - if isinstance(exc, from_exc): - raise to_exc(exc) from exc - raise # pragma: nocover - - -class ConnectionNotAvailable(Exception): - pass - - -class ProxyError(Exception): - pass - - -class UnsupportedProtocol(Exception): - pass - - -class ProtocolError(Exception): - pass - - -class RemoteProtocolError(ProtocolError): - 
pass - - -class LocalProtocolError(ProtocolError): - pass - - -# Timeout errors - - -class TimeoutException(Exception): - pass - - -class PoolTimeout(TimeoutException): - pass - - -class ConnectTimeout(TimeoutException): - pass - - -class ReadTimeout(TimeoutException): - pass - - -class WriteTimeout(TimeoutException): - pass - - -# Network errors - - -class NetworkError(Exception): - pass - - -class ConnectError(NetworkError): - pass - - -class ReadError(NetworkError): - pass - - -class WriteError(NetworkError): - pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_models.py b/venv/lib/python3.12/site-packages/httpcore/_models.py deleted file mode 100644 index 8a65f133..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_models.py +++ /dev/null @@ -1,516 +0,0 @@ -from __future__ import annotations - -import base64 -import ssl -import typing -import urllib.parse - -# Functions for typechecking... - - -ByteOrStr = typing.Union[bytes, str] -HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] -HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] -HeaderTypes = typing.Union[HeadersAsSequence, HeadersAsMapping, None] - -Extensions = typing.MutableMapping[str, typing.Any] - - -def enforce_bytes(value: bytes | str, *, name: str) -> bytes: - """ - Any arguments that are ultimately represented as bytes can be specified - either as bytes or as strings. - - However we enforce that any string arguments must only contain characters in - the plain ASCII range. chr(0)...chr(127). If you need to use characters - outside that range then be precise, and use a byte-wise argument. - """ - if isinstance(value, str): - try: - return value.encode("ascii") - except UnicodeEncodeError: - raise TypeError(f"{name} strings may not include unicode characters.") - elif isinstance(value, bytes): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") - - -def enforce_url(value: URL | bytes | str, *, name: str) -> URL: - """ - Type check for URL parameters. - """ - if isinstance(value, (bytes, str)): - return URL(value) - elif isinstance(value, URL): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") - - -def enforce_headers( - value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str -) -> list[tuple[bytes, bytes]]: - """ - Convienence function that ensure all items in request or response headers - are either bytes or strings in the plain ASCII range. - """ - if value is None: - return [] - elif isinstance(value, typing.Mapping): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value.items() - ] - elif isinstance(value, typing.Sequence): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value - ] - - seen_type = type(value).__name__ - raise TypeError( - f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." 
- ) - - -def enforce_stream( - value: bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes] | None, - *, - name: str, -) -> typing.Iterable[bytes] | typing.AsyncIterable[bytes]: - if value is None: - return ByteStream(b"") - elif isinstance(value, bytes): - return ByteStream(value) - return value - - -# * https://tools.ietf.org/html/rfc3986#section-3.2.3 -# * https://url.spec.whatwg.org/#url-miscellaneous -# * https://url.spec.whatwg.org/#scheme-state -DEFAULT_PORTS = { - b"ftp": 21, - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, -} - - -def include_request_headers( - headers: list[tuple[bytes, bytes]], - *, - url: "URL", - content: None | bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes], -) -> list[tuple[bytes, bytes]]: - headers_set = set(k.lower() for k, v in headers) - - if b"host" not in headers_set: - default_port = DEFAULT_PORTS.get(url.scheme) - if url.port is None or url.port == default_port: - header_value = url.host - else: - header_value = b"%b:%d" % (url.host, url.port) - headers = [(b"Host", header_value)] + headers - - if ( - content is not None - and b"content-length" not in headers_set - and b"transfer-encoding" not in headers_set - ): - if isinstance(content, bytes): - content_length = str(len(content)).encode("ascii") - headers += [(b"Content-Length", content_length)] - else: - headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover - - return headers - - -# Interfaces for byte streams... - - -class ByteStream: - """ - A container for non-streaming content, and that supports both sync and async - stream iteration. - """ - - def __init__(self, content: bytes) -> None: - self._content = content - - def __iter__(self) -> typing.Iterator[bytes]: - yield self._content - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - yield self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" - - -class Origin: - def __init__(self, scheme: bytes, host: bytes, port: int) -> None: - self.scheme = scheme - self.host = host - self.port = port - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, Origin) - and self.scheme == other.scheme - and self.host == other.host - and self.port == other.port - ) - - def __str__(self) -> str: - scheme = self.scheme.decode("ascii") - host = self.host.decode("ascii") - port = str(self.port) - return f"{scheme}://{host}:{port}" - - -class URL: - """ - Represents the URL against which an HTTP request may be made. - - The URL may either be specified as a plain string, for convienence: - - ```python - url = httpcore.URL("https://www.example.com/") - ``` - - Or be constructed with explicitily pre-parsed components: - - ```python - url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') - ``` - - Using this second more explicit style allows integrations that are using - `httpcore` to pass through URLs that have already been parsed in order to use - libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures - that URL parsing is treated identically at both the networking level and at any - higher layers of abstraction. - - The four components are important here, as they allow the URL to be precisely - specified in a pre-parsed format. They also allow certain types of request to - be created that could not otherwise be expressed. - - For example, an HTTP request to `http://www.example.com/` forwarded via a proxy - at `http://localhost:8080`... 
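`include_request_headers` above defaults the `Host` header, omitting the port whenever it matches the scheme's default. A worked example of just that rule (stdlib only; the helper name `host_header` is illustrative, not part of httpcore):

```python
# Worked example of the Host-header rule in include_request_headers above:
# the port is omitted whenever it matches the scheme's default.
DEFAULT_PORTS = {b"http": 80, b"https": 443, b"ws": 80, b"wss": 443}

def host_header(scheme: bytes, host: bytes, port: int | None) -> bytes:
    if port is None or port == DEFAULT_PORTS.get(scheme):
        return host
    return b"%b:%d" % (host, port)

assert host_header(b"https", b"example.com", 443) == b"example.com"
assert host_header(b"http", b"example.com", 8080) == b"example.com:8080"
```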
- - ```python - # Constructs an HTTP request with a complete URL as the target: - # GET https://www.example.com/ HTTP/1.1 - url = httpcore.URL( - scheme=b'http', - host=b'localhost', - port=8080, - target=b'https://www.example.com/' - ) - request = httpcore.Request( - method="GET", - url=url - ) - ``` - - Another example is constructing an `OPTIONS *` request... - - ```python - # Constructs an 'OPTIONS *' HTTP request: - # OPTIONS * HTTP/1.1 - url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') - request = httpcore.Request(method="OPTIONS", url=url) - ``` - - This kind of request is not possible to formulate with a URL string, - because the `/` delimiter is always used to demark the target from the - host/port portion of the URL. - - For convenience, string-like arguments may be specified either as strings or - as bytes. However, once a request is being issue over-the-wire, the URL - components are always ultimately required to be a bytewise representation. - - In order to avoid any ambiguity over character encodings, when strings are used - as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. - If you require a bytewise representation that is outside this range you must - handle the character encoding directly, and pass a bytes instance. - """ - - def __init__( - self, - url: bytes | str = "", - *, - scheme: bytes | str = b"", - host: bytes | str = b"", - port: int | None = None, - target: bytes | str = b"", - ) -> None: - """ - Parameters: - url: The complete URL as a string or bytes. - scheme: The URL scheme as a string or bytes. - Typically either `"http"` or `"https"`. - host: The URL host as a string or bytes. Such as `"www.example.com"`. - port: The port to connect to. Either an integer or `None`. - target: The target of the HTTP request. Such as `"/items?search=red"`. - """ - if url: - parsed = urllib.parse.urlparse(enforce_bytes(url, name="url")) - self.scheme = parsed.scheme - self.host = parsed.hostname or b"" - self.port = parsed.port - self.target = (parsed.path or b"/") + ( - b"?" + parsed.query if parsed.query else b"" - ) - else: - self.scheme = enforce_bytes(scheme, name="scheme") - self.host = enforce_bytes(host, name="host") - self.port = port - self.target = enforce_bytes(target, name="target") - - @property - def origin(self) -> Origin: - default_port = { - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, - b"socks5": 1080, - b"socks5h": 1080, - }[self.scheme] - return Origin( - scheme=self.scheme, host=self.host, port=self.port or default_port - ) - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, URL) - and other.scheme == self.scheme - and other.host == self.host - and other.port == self.port - and other.target == self.target - ) - - def __bytes__(self) -> bytes: - if self.port is None: - return b"%b://%b%b" % (self.scheme, self.host, self.target) - return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) - - def __repr__(self) -> str: - return ( - f"{self.__class__.__name__}(scheme={self.scheme!r}, " - f"host={self.host!r}, port={self.port!r}, target={self.target!r})" - ) - - -class Request: - """ - An HTTP request. - """ - - def __init__( - self, - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes - | typing.Iterable[bytes] - | typing.AsyncIterable[bytes] - | None = None, - extensions: Extensions | None = None, - ) -> None: - """ - Parameters: - method: The HTTP request method, either as a string or bytes. 
- For example: `GET`. - url: The request URL, either as a `URL` instance, or as a string or bytes. - For example: `"https://www.example.com".` - headers: The HTTP request headers. - content: The content of the request body. - extensions: A dictionary of optional extra information included on - the request. Possible keys include `"timeout"`, and `"trace"`. - """ - self.method: bytes = enforce_bytes(method, name="method") - self.url: URL = enforce_url(url, name="url") - self.headers: list[tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( - enforce_stream(content, name="content") - ) - self.extensions = {} if extensions is None else extensions - - if "target" in self.extensions: - self.url = URL( - scheme=self.url.scheme, - host=self.url.host, - port=self.url.port, - target=self.extensions["target"], - ) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.method!r}]>" - - -class Response: - """ - An HTTP response. - """ - - def __init__( - self, - status: int, - *, - headers: HeaderTypes = None, - content: bytes - | typing.Iterable[bytes] - | typing.AsyncIterable[bytes] - | None = None, - extensions: Extensions | None = None, - ) -> None: - """ - Parameters: - status: The HTTP status code of the response. For example `200`. - headers: The HTTP response headers. - content: The content of the response body. - extensions: A dictionary of optional extra information included on - the responseself.Possible keys include `"http_version"`, - `"reason_phrase"`, and `"network_stream"`. - """ - self.status: int = status - self.headers: list[tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = ( - enforce_stream(content, name="content") - ) - self.extensions = {} if extensions is None else extensions - - self._stream_consumed = False - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - if isinstance(self.stream, typing.Iterable): - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'response.read()' first." - ) - else: - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'await response.aread()' first." - ) - return self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.status}]>" - - # Sync interface... - - def read(self) -> bytes: - if not isinstance(self.stream, typing.Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to read an asynchronous response using 'response.read()'. " - "You should use 'await response.aread()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part for part in self.iter_stream()]) - return self._content - - def iter_stream(self) -> typing.Iterator[bytes]: - if not isinstance(self.stream, typing.Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream an asynchronous response using 'for ... in " - "response.iter_stream()'. " - "You should use 'async for ... in response.aiter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'for ... in response.iter_stream()' more than once." 
- ) - self._stream_consumed = True - for chunk in self.stream: - yield chunk - - def close(self) -> None: - if not isinstance(self.stream, typing.Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to close an asynchronous response using 'response.close()'. " - "You should use 'await response.aclose()' instead." - ) - if hasattr(self.stream, "close"): - self.stream.close() - - # Async interface... - - async def aread(self) -> bytes: - if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to read an synchronous response using " - "'await response.aread()'. " - "You should use 'response.read()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_stream()]) - return self._content - - async def aiter_stream(self) -> typing.AsyncIterator[bytes]: - if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream an synchronous response using 'async for ... in " - "response.aiter_stream()'. " - "You should use 'for ... in response.iter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'async for ... in response.aiter_stream()' " - "more than once." - ) - self._stream_consumed = True - async for chunk in self.stream: - yield chunk - - async def aclose(self) -> None: - if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to close a synchronous response using " - "'await response.aclose()'. " - "You should use 'response.close()' instead." - ) - if hasattr(self.stream, "aclose"): - await self.stream.aclose() - - -class Proxy: - def __init__( - self, - url: URL | bytes | str, - auth: tuple[bytes | str, bytes | str] | None = None, - headers: HeadersAsMapping | HeadersAsSequence | None = None, - ssl_context: ssl.SSLContext | None = None, - ): - self.url = enforce_url(url, name="url") - self.headers = enforce_headers(headers, name="headers") - self.ssl_context = ssl_context - - if auth is not None: - username = enforce_bytes(auth[0], name="auth") - password = enforce_bytes(auth[1], name="auth") - userpass = username + b":" + password - authorization = b"Basic " + base64.b64encode(userpass) - self.auth: tuple[bytes, bytes] | None = (username, password) - self.headers = [(b"Proxy-Authorization", authorization)] + self.headers - else: - self.auth = None diff --git a/venv/lib/python3.12/site-packages/httpcore/_ssl.py b/venv/lib/python3.12/site-packages/httpcore/_ssl.py deleted file mode 100644 index c99c5a67..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_ssl.py +++ /dev/null @@ -1,9 +0,0 @@ -import ssl - -import certifi - - -def default_ssl_context() -> ssl.SSLContext: - context = ssl.create_default_context() - context.load_verify_locations(certifi.where()) - return context diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py deleted file mode 100644 index b476d76d..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .http_proxy import HTTPProxy -from .interfaces import ConnectionInterface - -try: - from .http2 import HTTP2Connection -except ImportError: # pragma: nocover - - class HTTP2Connection: # type: ignore - def __init__(self, *args, **kwargs) -> None: # 
type: ignore - raise RuntimeError( - "Attempted to use http2 support, but the `h2` package is not " - "installed. Use 'pip install httpcore[http2]'." - ) - - -try: - from .socks_proxy import SOCKSProxy -except ImportError: # pragma: nocover - - class SOCKSProxy: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use SOCKS support, but the `socksio` package is not " - "installed. Use 'pip install httpcore[socks]'." - ) - - -__all__ = [ - "HTTPConnection", - "ConnectionPool", - "HTTPProxy", - "HTTP11Connection", - "HTTP2Connection", - "ConnectionInterface", - "SOCKSProxy", -] diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index b0018af4..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc deleted file mode 100644 index 55d78274..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc deleted file mode 100644 index fc34d738..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc deleted file mode 100644 index aa5ae941..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc deleted file mode 100644 index 7c674cc9..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc deleted file mode 100644 index 4882d372..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc deleted file mode 100644 index 8a3a89d4..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc deleted file mode 100644 index 6c1fd0cc..00000000 Binary files a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc and /dev/null differ diff 
--git a/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py deleted file mode 100644 index 363f8be8..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py +++ /dev/null @@ -1,222 +0,0 @@ -from __future__ import annotations - -import itertools -import logging -import ssl -import types -import typing - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream -from .._exceptions import ConnectError, ConnectTimeout -from .._models import Origin, Request, Response -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. - - -logger = logging.getLogger("httpcore.connection") - - -def exponential_backoff(factor: float) -> typing.Iterator[float]: - """ - Generate a geometric sequence that has a ratio of 2 and starts with 0. - - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class HTTPConnection(ConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: ssl.SSLContext | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: NetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connection: ConnectionInterface | None = None - self._connect_failed: bool = False - self._request_lock = Lock() - self._socket_options = socket_options - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - try: - with self._request_lock: - if self._connection is None: - stream = self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except BaseException as exc: - self._connect_failed = True - raise exc - - return self._connection.handle_request(request) - - def _connect(self, request: Request) -> NetworkStream: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": 
self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme in (b"https", b"wss"): - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - with Trace("retry", logger, request, kwargs) as trace: - self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def close(self) -> None: - if self._connection is not None: - with Trace("close", logger, None, {}): - self._connection.close() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
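`exponential_backoff` above yields an immediate first retry followed by doublings of `factor`, which is how the retry loop in `_connect` spaces its attempts. A quick look at the schedule it produces for the module's `RETRIES_BACKOFF_FACTOR` of 0.5:

```python
# The schedule exponential_backoff produces for RETRIES_BACKOFF_FACTOR = 0.5:
# an immediate first retry, then delays that double from the factor.
import itertools

def exponential_backoff(factor: float):
    yield 0
    for n in itertools.count():
        yield factor * 2**n

delays = exponential_backoff(0.5)
print([next(delays) for _ in range(5)])  # [0, 0.5, 1.0, 2.0, 4.0]
```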
- - def __enter__(self) -> HTTPConnection: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self.close() diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py deleted file mode 100644 index 9ccfa53e..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py +++ /dev/null @@ -1,420 +0,0 @@ -from __future__ import annotations - -import ssl -import sys -import types -import typing - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Proxy, Request, Response -from .._synchronization import Event, ShieldCancellation, ThreadLock -from .connection import HTTPConnection -from .interfaces import ConnectionInterface, RequestInterface - - -class PoolRequest: - def __init__(self, request: Request) -> None: - self.request = request - self.connection: ConnectionInterface | None = None - self._connection_acquired = Event() - - def assign_to_connection(self, connection: ConnectionInterface | None) -> None: - self.connection = connection - self._connection_acquired.set() - - def clear_connection(self) -> None: - self.connection = None - self._connection_acquired = Event() - - def wait_for_connection( - self, timeout: float | None = None - ) -> ConnectionInterface: - if self.connection is None: - self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - def is_queued(self) -> bool: - return self.connection is None - - -class ConnectionPool(RequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: ssl.SSLContext | None = None, - proxy: Proxy | None = None, - max_connections: int | None = 10, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: NetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. Can also be used to connect - using a particular address family. 
Using `local_address="0.0.0.0"` - will connect using an `AF_INET` address (IPv4), while using - `local_address="::"` will connect using an `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. - socket_options: Socket options that have to be included - in the TCP socket when the connection was established. - """ - self._ssl_context = ssl_context - self._proxy = proxy - self._max_connections = ( - sys.maxsize if max_connections is None else max_connections - ) - self._max_keepalive_connections = ( - sys.maxsize - if max_keepalive_connections is None - else max_keepalive_connections - ) - self._max_keepalive_connections = min( - self._max_connections, self._max_keepalive_connections - ) - - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._socket_options = socket_options - - # The mutable state on a connection pool is the queue of incoming requests, - # and the set of connections that are servicing those requests. - self._connections: list[ConnectionInterface] = [] - self._requests: list[PoolRequest] = [] - - # We only mutate the state of the connection pool within an 'optional_thread_lock' - # context. This holds a threading lock unless we're running in async mode, - # in which case it is a no-op. - self._optional_thread_lock = ThreadLock() - - def create_connection(self, origin: Origin) -> ConnectionInterface: - if self._proxy is not None: - if self._proxy.url.scheme in (b"socks5", b"socks5h"): - from .socks_proxy import Socks5Connection - - return Socks5Connection( - proxy_origin=self._proxy.url.origin, - proxy_auth=self._proxy.auth, - remote_origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - elif origin.scheme == b"http": - from .http_proxy import ForwardHTTPConnection - - return ForwardHTTPConnection( - proxy_origin=self._proxy.url.origin, - proxy_headers=self._proxy.headers, - proxy_ssl_context=self._proxy.ssl_context, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - ) - from .http_proxy import TunnelHTTPConnection - - return TunnelHTTPConnection( - proxy_origin=self._proxy.url.origin, - proxy_headers=self._proxy.headers, - proxy_ssl_context=self._proxy.ssl_context, - remote_origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - return HTTPConnection( - origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - retries=self._retries, - local_address=self._local_address, - uds=self._uds, - network_backend=self._network_backend, - socket_options=self._socket_options, - ) - - @property - def connections(self) -> list[ConnectionInterface]: - """ - Return a list of the connections currently in the pool. - - For example: - - ```python - >>> pool.connections - [ - , - , - , - ] - ``` - """ - return list(self._connections) - - def handle_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. 
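For context, a hedged usage sketch of the pool being deleted here: `ConnectionPool` inherits a `request()` convenience from `RequestInterface`, so typical callers never touch `handle_request` directly (assumes `pip install httpcore` and outbound network access):

```python
# Hedged usage sketch of the sync ConnectionPool via its inherited request()
# helper; handle_request below is the core implementation it calls into.
import httpcore

with httpcore.ConnectionPool(max_connections=10) as pool:
    response = pool.request("GET", "https://www.example.com/")
    print(response.status, len(response.content))
```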
- - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - - with self._optional_thread_lock: - # Add the incoming request to our request queue. - pool_request = PoolRequest(request) - self._requests.append(pool_request) - - try: - while True: - with self._optional_thread_lock: - # Assign incoming requests to available connections, - # closing or creating new connections as required. - closing = self._assign_requests_to_connections() - self._close_connections(closing) - - # Wait until this request has an assigned connection. - connection = pool_request.wait_for_connection(timeout=timeout) - - try: - # Send the request on the assigned connection. - response = connection.handle_request( - pool_request.request - ) - except ConnectionNotAvailable: - # In some cases a connection may initially be available to - # handle a request, but then become unavailable. - # - # In this case we clear the connection and try again. - pool_request.clear_connection() - else: - break # pragma: nocover - - except BaseException as exc: - with self._optional_thread_lock: - # For any exception or cancellation we remove the request from - # the queue, and then re-assign requests to connections. - self._requests.remove(pool_request) - closing = self._assign_requests_to_connections() - - self._close_connections(closing) - raise exc from None - - # Return the response. Note that in this case we still have to manage - # the point at which the response is closed. - assert isinstance(response.stream, typing.Iterable) - return Response( - status=response.status, - headers=response.headers, - content=PoolByteStream( - stream=response.stream, pool_request=pool_request, pool=self - ), - extensions=response.extensions, - ) - - def _assign_requests_to_connections(self) -> list[ConnectionInterface]: - """ - Manage the state of the connection pool, assigning incoming - requests to connections as available. - - Called whenever a new request is added or removed from the pool. - - Any closing connections are returned, allowing the I/O for closing - those connections to be handled seperately. - """ - closing_connections = [] - - # First we handle cleaning up any connections that are closed, - # have expired their keep-alive, or surplus idle connections. - for connection in list(self._connections): - if connection.is_closed(): - # log: "removing closed connection" - self._connections.remove(connection) - elif connection.has_expired(): - # log: "closing expired connection" - self._connections.remove(connection) - closing_connections.append(connection) - elif ( - connection.is_idle() - and len([connection.is_idle() for connection in self._connections]) - > self._max_keepalive_connections - ): - # log: "closing idle connection" - self._connections.remove(connection) - closing_connections.append(connection) - - # Assign queued requests to connections. 
- queued_requests = [request for request in self._requests if request.is_queued()] - for pool_request in queued_requests: - origin = pool_request.request.url.origin - available_connections = [ - connection - for connection in self._connections - if connection.can_handle_request(origin) and connection.is_available() - ] - idle_connections = [ - connection for connection in self._connections if connection.is_idle() - ] - - # There are three cases for how we may be able to handle the request: - # - # 1. There is an existing connection that can handle the request. - # 2. We can create a new connection to handle the request. - # 3. We can close an idle connection and then create a new connection - # to handle the request. - if available_connections: - # log: "reusing existing connection" - connection = available_connections[0] - pool_request.assign_to_connection(connection) - elif len(self._connections) < self._max_connections: - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - elif idle_connections: - # log: "closing idle connection" - connection = idle_connections[0] - self._connections.remove(connection) - closing_connections.append(connection) - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - - return closing_connections - - def _close_connections(self, closing: list[ConnectionInterface]) -> None: - # Close connections which have been removed from the pool. - with ShieldCancellation(): - for connection in closing: - connection.close() - - def close(self) -> None: - # Explicitly close the connection pool. - # Clears all existing requests and connections. 
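The three-way choice made in `_assign_requests_to_connections` above, distilled into a standalone function (simplified: the real code also matches request origins and returns the connections it wants closed):

```python
# Distilled form of the scheduling decision in _assign_requests_to_connections.
def schedule(has_available: bool, open_count: int, max_connections: int, has_idle: bool) -> str:
    if has_available:
        return "reuse existing connection"
    if open_count < max_connections:
        return "create new connection"
    if has_idle:
        return "close an idle connection, then create a new one"
    return "leave request queued"

print(schedule(False, 10, 10, True))  # close an idle connection, then create a new one
```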
- with self._optional_thread_lock: - closing_connections = list(self._connections) - self._connections = [] - self._close_connections(closing_connections) - - def __enter__(self) -> ConnectionPool: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self.close() - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - with self._optional_thread_lock: - request_is_queued = [request.is_queued() for request in self._requests] - connection_is_idle = [ - connection.is_idle() for connection in self._connections - ] - - num_active_requests = request_is_queued.count(False) - num_queued_requests = request_is_queued.count(True) - num_active_connections = connection_is_idle.count(False) - num_idle_connections = connection_is_idle.count(True) - - requests_info = ( - f"Requests: {num_active_requests} active, {num_queued_requests} queued" - ) - connection_info = ( - f"Connections: {num_active_connections} active, {num_idle_connections} idle" - ) - - return f"<{class_name} [{requests_info} | {connection_info}]>" - - -class PoolByteStream: - def __init__( - self, - stream: typing.Iterable[bytes], - pool_request: PoolRequest, - pool: ConnectionPool, - ) -> None: - self._stream = stream - self._pool_request = pool_request - self._pool = pool - self._closed = False - - def __iter__(self) -> typing.Iterator[bytes]: - try: - for part in self._stream: - yield part - except BaseException as exc: - self.close() - raise exc from None - - def close(self) -> None: - if not self._closed: - self._closed = True - with ShieldCancellation(): - if hasattr(self._stream, "close"): - self._stream.close() - - with self._pool._optional_thread_lock: - self._pool._requests.remove(self._pool_request) - closing = self._pool._assign_requests_to_connections() - - self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py deleted file mode 100644 index ebd3a974..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py +++ /dev/null @@ -1,379 +0,0 @@ -from __future__ import annotations - -import enum -import logging -import ssl -import time -import types -import typing - -import h11 - -from .._backends.base import NetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, - WriteError, - map_exceptions, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http11") - - -# A subset of `h11.Event` types supported by `_send_event` -H11SendEvent = typing.Union[ - h11.Request, - h11.Data, - h11.EndOfMessage, -] - - -class HTTPConnectionState(enum.IntEnum): - NEW = 0 - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP11Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: float | None = None, - ) -> None: - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: float | None = keepalive_expiry - self._expire_at: float | None = None - self._state = HTTPConnectionState.NEW - self._state_lock = Lock() - self._request_count = 0 - self._h11_state = h11.Connection( - our_role=h11.CLIENT, 
- max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, - ) - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): - self._request_count += 1 - self._state = HTTPConnectionState.ACTIVE - self._expire_at = None - else: - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request} - try: - with Trace( - "send_request_headers", logger, request, kwargs - ) as trace: - self._send_request_headers(**kwargs) - with Trace("send_request_body", logger, request, kwargs) as trace: - self._send_request_body(**kwargs) - except WriteError: - # If we get a write error while we're writing the request, - # then we supress this error and move on to attempting to - # read the response. Servers can sometimes close the request - # pre-emptively and then respond with a well formed HTTP - # error response. - pass - - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - ( - http_version, - status, - reason_phrase, - headers, - trailing_data, - ) = self._receive_response_headers(**kwargs) - trace.return_value = ( - http_version, - status, - reason_phrase, - headers, - ) - - network_stream = self._network_stream - - # CONNECT or Upgrade request - if (status == 101) or ( - (request.method == b"CONNECT") and (200 <= status < 300) - ): - network_stream = HTTP11UpgradeStream(network_stream, trailing_data) - - return Response( - status=status, - headers=headers, - content=HTTP11ConnectionByteStream(self, request), - extensions={ - "http_version": http_version, - "reason_phrase": reason_phrase, - "network_stream": network_stream, - }, - ) - except BaseException as exc: - with ShieldCancellation(): - with Trace("response_closed", logger, request) as trace: - self._response_closed() - raise exc - - # Sending the request... - - def _send_request_headers(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request( - method=request.method, - target=request.url.target, - headers=request.headers, - ) - self._send_event(event, timeout=timeout) - - def _send_request_body(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - assert isinstance(request.stream, typing.Iterable) - for chunk in request.stream: - event = h11.Data(data=chunk) - self._send_event(event, timeout=timeout) - - self._send_event(h11.EndOfMessage(), timeout=timeout) - - def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: - bytes_to_send = self._h11_state.send(event) - if bytes_to_send is not None: - self._network_stream.write(bytes_to_send, timeout=timeout) - - # Receiving the response... 
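The send path above (`_send_request_headers`, `_send_request_body`, `_send_event`) is a thin wrapper over h11: event objects go in, wire bytes come out, and the network stream writes them. A self-contained sketch of that cycle (assumes `pip install h11`):

```python
# Sketch of the send cycle wrapped by _send_event above: h11 serializes
# event objects into wire bytes for the network stream to write.
import h11

conn = h11.Connection(our_role=h11.CLIENT)
wire = conn.send(h11.Request(method="GET", target="/", headers=[("Host", "example.com")]))
wire += conn.send(h11.EndOfMessage())
print(wire.decode())  # the serialized HTTP/1.1 request
```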
- - def _receive_response_headers( - self, request: Request - ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Response): - break - if ( - isinstance(event, h11.InformationalResponse) - and event.status_code == 101 - ): - break - - http_version = b"HTTP/" + event.http_version - - # h11 version 0.11+ supports a `raw_items` interface to get the - # raw header casing, rather than the enforced lowercase headers. - headers = event.headers.raw_items() - - trailing_data, _ = self._h11_state.trailing_data - - return http_version, event.status_code, event.reason, headers, trailing_data - - def _receive_response_body( - self, request: Request - ) -> typing.Iterator[bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Data): - yield bytes(event.data) - elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - break - - def _receive_event( - self, timeout: float | None = None - ) -> h11.Event | type[h11.PAUSED]: - while True: - with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): - event = self._h11_state.next_event() - - if event is h11.NEED_DATA: - data = self._network_stream.read( - self.READ_NUM_BYTES, timeout=timeout - ) - - # If we feed this case through h11 we'll raise an exception like: - # - # httpcore.RemoteProtocolError: can't handle event type - # ConnectionClosed when role=SERVER and state=SEND_RESPONSE - # - # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle this case distinctly and treat - # it as a ConnectError. - if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: - msg = "Server disconnected without sending a response." - raise RemoteProtocolError(msg) - - self._h11_state.receive_data(data) - else: - # mypy fails to narrow the type in the above if statement above - return event # type: ignore[return-value] - - def _response_closed(self) -> None: - with self._state_lock: - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._state = HTTPConnectionState.IDLE - self._h11_state.start_next_cycle() - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - else: - self.close() - - # Once the connection is no longer required... - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # The ConnectionInterface methods provide information about the state of - # the connection, allowing for a connection pooling implementation to - # determine when to reuse and when to close the connection... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - # Note that HTTP/1.1 connections in the "NEW" state are not treated as - # being "available". The control flow which created the connection will - # be able to send an outgoing request, but the connection will not be - # acquired from the connection pool for any other request. 
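The receive-side counterpart: `_receive_event` feeds raw bytes into the same h11 state machine and pulls typed events out. A canned-response sketch of that loop (assumes `pip install h11`; real code reads from the socket whenever `next_event()` returns `h11.NEED_DATA`):

```python
# Receive-side counterpart to the sketch above: feed raw bytes into the h11
# state machine and pull typed events out, as _receive_event does.
import h11

conn = h11.Connection(our_role=h11.CLIENT)
conn.send(h11.Request(method="GET", target="/", headers=[("Host", "example.com")]))
conn.send(h11.EndOfMessage())

# A canned response keeps the sketch offline.
conn.receive_data(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok")
event = conn.next_event()
while not isinstance(event, h11.EndOfMessage):
    print(type(event).__name__)  # Response, then Data
    event = conn.next_event()
```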
- return self._state == HTTPConnectionState.IDLE - - def has_expired(self) -> bool: - now = time.monotonic() - keepalive_expired = self._expire_at is not None and now > self._expire_at - - # If the HTTP connection is idle but the socket is readable, then the - # only valid state is that the socket is about to return b"", indicating - # a server-initiated disconnect. - server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. - - def __enter__(self) -> HTTP11Connection: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self.close() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: HTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - def __iter__(self) -> typing.Iterator[bytes]: - kwargs = {"request": self._request} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. 
- with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - with Trace("response_closed", logger, self._request): - self._connection._response_closed() - - -class HTTP11UpgradeStream(NetworkStream): - def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: - self._stream = stream - self._leading_data = leading_data - - def read(self, max_bytes: int, timeout: float | None = None) -> bytes: - if self._leading_data: - buffer = self._leading_data[:max_bytes] - self._leading_data = self._leading_data[max_bytes:] - return buffer - else: - return self._stream.read(max_bytes, timeout) - - def write(self, buffer: bytes, timeout: float | None = None) -> None: - self._stream.write(buffer, timeout) - - def close(self) -> None: - self._stream.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: str | None = None, - timeout: float | None = None, - ) -> NetworkStream: - return self._stream.start_tls(ssl_context, server_hostname, timeout) - - def get_extra_info(self, info: str) -> typing.Any: - return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py deleted file mode 100644 index ddcc1890..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py +++ /dev/null @@ -1,592 +0,0 @@ -from __future__ import annotations - -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import NetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, Semaphore, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP2Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: float | None = None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: float | None = keepalive_expiry - self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - self._expire_at: float | None = None - self._request_count = 0 - self._init_lock = Lock() - self._state_lock = Lock() - self._read_lock = Lock() - self._write_lock = Lock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: dict[ - int, - list[ - h2.events.ResponseReceived - | h2.events.DataReceived - | h2.events.StreamEnded - | h2.events.StreamReset, - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. 
- self._connection_terminated: h2.events.ConnectionTerminated | None = None - - self._read_exception: Exception | None = None - self._write_exception: Exception | None = None - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - with self._init_lock: - if not self._sent_connection_init: - try: - sci_kwargs = {"request": request} - with Trace( - "send_connection_init", logger, request, sci_kwargs - ): - self._send_connection_init(**sci_kwargs) - except BaseException as exc: - with ShieldCancellation(): - self.close() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = Semaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - self._max_streams_semaphore.acquire() - - self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - with Trace("send_request_headers", logger, request, kwargs): - self._send_request_headers(request=request, stream_id=stream_id) - with Trace("send_request_body", logger, request, kwargs): - self._send_request_body(request=request, stream_id=stream_id) - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with ShieldCancellation(): - kwargs = {"stream_id": stream_id} - with Trace("response_closed", logger, request, kwargs): - self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. 
-                if self._connection_terminated:  # pragma: nocover
-                    raise RemoteProtocolError(self._connection_terminated)
-                # If h2 raises a protocol error in some other state then we
-                # must somehow have made a protocol violation.
-                raise LocalProtocolError(exc)  # pragma: nocover
-
-            raise exc
-
-    def _send_connection_init(self, request: Request) -> None:
-        """
-        The HTTP/2 connection requires some initial setup before we can start
-        using individual request/response streams on it.
-        """
-        # Need to set these manually here instead of manipulating via
-        # __setitem__() otherwise the H2Connection will emit SettingsUpdate
-        # frames in addition to sending the undesired defaults.
-        self._h2_state.local_settings = h2.settings.Settings(
-            client=True,
-            initial_values={
-                # Disable PUSH_PROMISE frames from the server since we don't do anything
-                # with them for now. Maybe when we support caching?
-                h2.settings.SettingCodes.ENABLE_PUSH: 0,
-                # These two are taken from h2 for safe defaults
-                h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
-                h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
-            },
-        )
-
-        # Some websites (*cough* Yahoo *cough*) balk at this setting being
-        # present in the initial handshake since it's not defined in the original
-        # RFC despite the RFC mandating ignoring settings you don't know about.
-        del self._h2_state.local_settings[
-            h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
-        ]
-
-        self._h2_state.initiate_connection()
-        self._h2_state.increment_flow_control_window(2**24)
-        self._write_outgoing_data(request)
-
-    # Sending the request...
-
-    def _send_request_headers(self, request: Request, stream_id: int) -> None:
-        """
-        Send the request headers to a given stream ID.
-        """
-        end_stream = not has_body_headers(request)
-
-        # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
-        # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
-        # HTTP/1.1 style headers, and map them appropriately if we end up on
-        # an HTTP/2 connection.
-        authority = [v for k, v in request.headers if k.lower() == b"host"][0]
-
-        headers = [
-            (b":method", request.method),
-            (b":authority", authority),
-            (b":scheme", request.url.scheme),
-            (b":path", request.url.target),
-        ] + [
-            (k.lower(), v)
-            for k, v in request.headers
-            if k.lower()
-            not in (
-                b"host",
-                b"transfer-encoding",
-            )
-        ]
-
-        self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
-        self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
-        self._write_outgoing_data(request)
-
-    def _send_request_body(self, request: Request, stream_id: int) -> None:
-        """
-        Iterate over the request body sending it to a given stream ID.
-        """
-        if not has_body_headers(request):
-            return
-
-        assert isinstance(request.stream, typing.Iterable)
-        for data in request.stream:
-            self._send_stream_data(request, stream_id, data)
-        self._send_end_stream(request, stream_id)
-
-    def _send_stream_data(
-        self, request: Request, stream_id: int, data: bytes
-    ) -> None:
-        """
-        Send a single chunk of data in one or more data frames.
-        """
-        while data:
-            max_flow = self._wait_for_outgoing_flow(request, stream_id)
-            chunk_size = min(len(data), max_flow)
-            chunk, data = data[:chunk_size], data[chunk_size:]
-            self._h2_state.send_data(stream_id, chunk)
-            self._write_outgoing_data(request)
-
-    def _send_end_stream(self, request: Request, stream_id: int) -> None:
-        """
-        Send an empty data frame on a given stream ID with the END_STREAM flag set.
- """ - self._h2_state.end_stream(stream_id) - self._write_outgoing_data(request) - - # Receiving the response... - - def _receive_response( - self, request: Request, stream_id: int - ) -> tuple[int, list[tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - assert event.headers is not None - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.Iterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - assert event.flow_controlled_length is not None - assert event.data is not None - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - def _receive_stream_event( - self, request: Request, stream_id: int - ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - def _receive_events( - self, request: Request, stream_id: int | None = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID. - """ - with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. 
- if stream_id is None or not self._events.get(stream_id): - events = self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - with Trace( - "receive_remote_settings", logger, request - ) as trace: - self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - self._write_outgoing_data(request) - - def _receive_remote_settings_change( - self, event: h2.events.RemoteSettingsChanged - ) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - def _response_closed(self, stream_id: int) -> None: - self._max_streams_semaphore.release() - del self._events[stream_id] - with self._state_lock: - if self._connection_terminated and not self._events: - self.close() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - self.close() - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # Wrappers around network read/write operations... - - def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. 
- self._read_exception = exc - self._connection_error = True - raise exc - - events: list[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
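
(As the comment above notes, a connection instance can be driven directly. A sketch of that direct usage — assuming network access and a server that negotiates HTTP/2 over ALPN; the TLS setup mirrors the tunnel code later in this diff:)

import ssl

from httpcore import Origin
from httpcore._backends.sync import SyncBackend
from httpcore._ssl import default_ssl_context

origin = Origin(b"https", b"example.com", 443)
stream = SyncBackend().connect_tcp("example.com", 443, timeout=10.0)
ctx = default_ssl_context()
ctx.set_alpn_protocols(["h2"])  # offer HTTP/2 only, for this sketch
stream = stream.start_tls(ctx, server_hostname="example.com", timeout=10.0)

# The context manager guarantees close() on exit, as defined just below.
with HTTP2Connection(origin=origin, stream=stream) as conn:
    response = conn.request("GET", "https://example.com/")
    print(response.status)
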
- - def __enter__(self) -> HTTP2Connection: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self.close() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: HTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - def __iter__(self) -> typing.Iterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - with Trace("response_closed", logger, self._request, kwargs): - self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py deleted file mode 100644 index ecca88f7..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py +++ /dev/null @@ -1,367 +0,0 @@ -from __future__ import annotations - -import base64 -import logging -import ssl -import typing - -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -ByteOrStr = typing.Union[bytes, str] -HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] -HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, - override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, -) -> list[tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -class HTTPProxy(ConnectionPool): # pragma: nocover - """ - A connection pool that sends requests via an HTTP proxy. 
- """ - - def __init__( - self, - proxy_url: URL | bytes | str, - proxy_auth: tuple[bytes | str, bytes | str] | None = None, - proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, - ssl_context: ssl.SSLContext | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - max_connections: int | None = 10, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: str | None = None, - uds: str | None = None, - network_backend: NetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - userpass = username + b":" + password - authorization = b"Basic " + base64.b64encode(userpass) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> ConnectionInterface: - if origin.scheme == b"http": - return ForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return TunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class ForwardHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, - keepalive_expiry: float | None = None, - network_backend: NetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - ) -> None: - self._connection = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - def handle_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return self._connection.handle_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() 
- - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - -class TunnelHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: ssl.SSLContext | None = None, - proxy_ssl_context: ssl.SSLContext | None = None, - proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - network_backend: NetworkBackend | None = None, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - self._connection: ConnectionInterface = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = Lock() - self._connected = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = self._connection.handle_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - self._connection.close() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return self._connection.handle_request(request) - - def can_handle_request(self, 
origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py deleted file mode 100644 index e673d4cc..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py +++ /dev/null @@ -1,137 +0,0 @@ -from __future__ import annotations - -import contextlib -import typing - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class RequestInterface: - def request( - self, - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.Iterator[bytes] | None = None, - extensions: Extensions | None = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - response.read() - finally: - response.close() - return response - - @contextlib.contextmanager - def stream( - self, - method: bytes | str, - url: URL | bytes | str, - *, - headers: HeaderTypes = None, - content: bytes | typing.Iterator[bytes] | None = None, - extensions: Extensions | None = None, - ) -> typing.Iterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - yield response - finally: - response.close() - - def handle_request(self, request: Request) -> Response: - raise NotImplementedError() # pragma: nocover - - -class ConnectionInterface(RequestInterface): - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def can_handle_request(self, origin: Origin) -> bool: - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an - outgoing request. - - An HTTP/1.1 connection will only be available if it is currently idle. - - An HTTP/2 connection will be available so long as the stream ID space is - not yet exhausted, and the connection is not in an error state. 
- - While the connection is being established we may not yet know if it is going - to result in an HTTP/1.1 or HTTP/2 connection. The connection should be - treated as being available, but might ultimately raise `NewConnectionRequired` - required exceptions if multiple requests are attempted over a connection - that ends up being established as HTTP/1.1. - """ - raise NotImplementedError() # pragma: nocover - - def has_expired(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - - This either means that the connection is idle and it has passed the - expiry time on its keep-alive, or that server has sent an EOF. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - - Used when a response is closed to determine if the connection may be - returned to the connection pool or not. - """ - raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py deleted file mode 100644 index 0ca96ddf..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py +++ /dev/null @@ -1,341 +0,0 @@ -from __future__ import annotations - -import logging -import ssl - -import socksio - -from .._backends.sync import SyncBackend -from .._backends.base import NetworkBackend, NetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -def _init_socks5_connection( - stream: NetworkStream, - *, - host: bytes, - port: int, - auth: tuple[bytes, bytes] | None = None, -) -> None: - conn = socksio.socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socksio.socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}." 
-        )
-
-    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
-        # Username/password request
-        assert auth is not None
-        username, password = auth
-        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
-        outgoing_bytes = conn.data_to_send()
-        stream.write(outgoing_bytes)
-
-        # Username/password response
-        incoming_bytes = stream.read(max_bytes=4096)
-        response = conn.receive_data(incoming_bytes)
-        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
-        if not response.success:
-            raise ProxyError("Invalid username/password")
-
-    # Connect request
-    conn.send(
-        socksio.socks5.SOCKS5CommandRequest.from_address(
-            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
-        )
-    )
-    outgoing_bytes = conn.data_to_send()
-    stream.write(outgoing_bytes)
-
-    # Connect response
-    incoming_bytes = stream.read(max_bytes=4096)
-    response = conn.receive_data(incoming_bytes)
-    assert isinstance(response, socksio.socks5.SOCKS5Reply)
-    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
-        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
-        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
-
-
-class SOCKSProxy(ConnectionPool):  # pragma: nocover
-    """
-    A connection pool that sends requests via a SOCKS proxy.
-    """
-
-    def __init__(
-        self,
-        proxy_url: URL | bytes | str,
-        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
-        ssl_context: ssl.SSLContext | None = None,
-        max_connections: int | None = 10,
-        max_keepalive_connections: int | None = None,
-        keepalive_expiry: float | None = None,
-        http1: bool = True,
-        http2: bool = False,
-        retries: int = 0,
-        network_backend: NetworkBackend | None = None,
-    ) -> None:
-        """
-        A connection pool for making HTTP requests.
-
-        Parameters:
-            proxy_url: The URL to use when connecting to the proxy server.
-                For example `"http://127.0.0.1:8080/"`.
-            ssl_context: An SSL context to use for verifying connections.
-                If not specified, the default `httpcore.default_ssl_context()`
-                will be used.
-            max_connections: The maximum number of concurrent HTTP connections that
-                the pool should allow. Any attempt to send a request on a pool that
-                would exceed this amount will block until a connection is available.
-            max_keepalive_connections: The maximum number of idle HTTP connections
-                that will be maintained in the pool.
-            keepalive_expiry: The duration in seconds that an idle HTTP connection
-                may be maintained for before being expired from the pool.
-            http1: A boolean indicating if HTTP/1.1 requests should be supported
-                by the connection pool. Defaults to True.
-            http2: A boolean indicating if HTTP/2 requests should be supported by
-                the connection pool. Defaults to False.
-            retries: The maximum number of retries when trying to establish
-                a connection.
-            local_address: Local address to connect from. Can also be used to
-                connect using a particular address family. Using
-                `local_address="0.0.0.0"` will connect using an `AF_INET` address
-                (IPv4), while using `local_address="::"` will connect using an
-                `AF_INET6` address (IPv6).
-            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
-            network_backend: A backend instance to use for handling network I/O.
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: tuple[bytes, bytes] | None = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> ConnectionInterface: - return Socks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class Socks5Connection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: tuple[bytes, bytes] | None = None, - ssl_context: ssl.SSLContext | None = None, - keepalive_expiry: float | None = None, - http1: bool = True, - http2: bool = False, - network_backend: NetworkBackend | None = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connect_lock = Lock() - self._connection: ConnectionInterface | None = None - self._connect_failed = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and 
ssl_object.selected_alpn_protocol() == "h2"
-                    )
-
-                    # Create the HTTP/1.1 or HTTP/2 connection
-                    if http2_negotiated or (
-                        self._http2 and not self._http1
-                    ):  # pragma: nocover
-                        from .http2 import HTTP2Connection
-
-                        self._connection = HTTP2Connection(
-                            origin=self._remote_origin,
-                            stream=stream,
-                            keepalive_expiry=self._keepalive_expiry,
-                        )
-                    else:
-                        self._connection = HTTP11Connection(
-                            origin=self._remote_origin,
-                            stream=stream,
-                            keepalive_expiry=self._keepalive_expiry,
-                        )
-                except Exception as exc:
-                    self._connect_failed = True
-                    raise exc
-            elif not self._connection.is_available():  # pragma: nocover
-                raise ConnectionNotAvailable()
-
-        return self._connection.handle_request(request)
-
-    def can_handle_request(self, origin: Origin) -> bool:
-        return origin == self._remote_origin
-
-    def close(self) -> None:
-        if self._connection is not None:
-            self._connection.close()
-
-    def is_available(self) -> bool:
-        if self._connection is None:  # pragma: nocover
-            # If HTTP/2 support is enabled, and the resulting connection could
-            # end up as HTTP/2 then we should indicate the connection as being
-            # available to service multiple requests.
-            return (
-                self._http2
-                and (self._remote_origin.scheme == b"https" or not self._http1)
-                and not self._connect_failed
-            )
-        return self._connection.is_available()
-
-    def has_expired(self) -> bool:
-        if self._connection is None:  # pragma: nocover
-            return self._connect_failed
-        return self._connection.has_expired()
-
-    def is_idle(self) -> bool:
-        if self._connection is None:  # pragma: nocover
-            return self._connect_failed
-        return self._connection.is_idle()
-
-    def is_closed(self) -> bool:
-        if self._connection is None:  # pragma: nocover
-            return self._connect_failed
-        return self._connection.is_closed()
-
-    def info(self) -> str:
-        if self._connection is None:  # pragma: nocover
-            return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
-        return self._connection.info()
-
-    def __repr__(self) -> str:
-        return f"<{self.__class__.__name__} [{self.info()}]>"
diff --git a/venv/lib/python3.12/site-packages/httpcore/_synchronization.py b/venv/lib/python3.12/site-packages/httpcore/_synchronization.py
deleted file mode 100644
index 2ecc9e9c..00000000
--- a/venv/lib/python3.12/site-packages/httpcore/_synchronization.py
+++ /dev/null
@@ -1,318 +0,0 @@
-from __future__ import annotations
-
-import threading
-import types
-
-from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
-
-# Our async synchronization primitives use either 'anyio' or 'trio' depending
-# on if they're running under asyncio or trio.
-
-try:
-    import trio
-except (ImportError, NotImplementedError):  # pragma: nocover
-    trio = None  # type: ignore
-
-try:
-    import anyio
-except ImportError:  # pragma: nocover
-    anyio = None  # type: ignore
-
-
-def current_async_library() -> str:
-    # Determine if we're running under trio or asyncio.
-    # See https://sniffio.readthedocs.io/en/latest/
-    try:
-        import sniffio
-    except ImportError:  # pragma: nocover
-        environment = "asyncio"
-    else:
-        environment = sniffio.current_async_library()
-
-    if environment not in ("asyncio", "trio"):  # pragma: nocover
-        raise RuntimeError("Running under an unsupported async environment.")
-
-    if environment == "asyncio" and anyio is None:  # pragma: nocover
-        raise RuntimeError(
-            "Running with asyncio requires installation of 'httpcore[asyncio]'."
- ) - - if environment == "trio" and trio is None: # pragma: nocover - raise RuntimeError( - "Running with trio requires installation of 'httpcore[trio]'." - ) - - return environment - - -class AsyncLock: - """ - This is a standard lock. - - In the sync case `Lock` provides thread locking. - In the async case `AsyncLock` provides async locking. - """ - - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_lock = trio.Lock() - elif self._backend == "asyncio": - self._anyio_lock = anyio.Lock() - - async def __aenter__(self) -> AsyncLock: - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_lock.acquire() - elif self._backend == "asyncio": - await self._anyio_lock.acquire() - - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - if self._backend == "trio": - self._trio_lock.release() - elif self._backend == "asyncio": - self._anyio_lock.release() - - -class AsyncThreadLock: - """ - This is a threading-only lock for no-I/O contexts. - - In the sync case `ThreadLock` provides thread locking. - In the async case `AsyncThreadLock` is a no-op. - """ - - def __enter__(self) -> AsyncThreadLock: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - pass - - -class AsyncEvent: - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_event = trio.Event() - elif self._backend == "asyncio": - self._anyio_event = anyio.Event() - - def set(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - self._trio_event.set() - elif self._backend == "asyncio": - self._anyio_event.set() - - async def wait(self, timeout: float | None = None) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} - timeout_or_inf = float("inf") if timeout is None else timeout - with map_exceptions(trio_exc_map): - with trio.fail_after(timeout_or_inf): - await self._trio_event.wait() - elif self._backend == "asyncio": - anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} - with map_exceptions(anyio_exc_map): - with anyio.fail_after(timeout): - await self._anyio_event.wait() - - -class AsyncSemaphore: - def __init__(self, bound: int) -> None: - self._bound = bound - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a semaphore with the correct implementation. 
- """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_semaphore = trio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - elif self._backend == "asyncio": - self._anyio_semaphore = anyio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - - async def acquire(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_semaphore.acquire() - elif self._backend == "asyncio": - await self._anyio_semaphore.acquire() - - async def release(self) -> None: - if self._backend == "trio": - self._trio_semaphore.release() - elif self._backend == "asyncio": - self._anyio_semaphore.release() - - -class AsyncShieldCancellation: - # For certain portions of our codebase where we're dealing with - # closing connections during exception handling we want to shield - # the operation from being cancelled. - # - # with AsyncShieldCancellation(): - # ... # clean-up operations, shielded from cancellation. - - def __init__(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a shielded scope with the correct implementation. - """ - self._backend = current_async_library() - - if self._backend == "trio": - self._trio_shield = trio.CancelScope(shield=True) - elif self._backend == "asyncio": - self._anyio_shield = anyio.CancelScope(shield=True) - - def __enter__(self) -> AsyncShieldCancellation: - if self._backend == "trio": - self._trio_shield.__enter__() - elif self._backend == "asyncio": - self._anyio_shield.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - if self._backend == "trio": - self._trio_shield.__exit__(exc_type, exc_value, traceback) - elif self._backend == "asyncio": - self._anyio_shield.__exit__(exc_type, exc_value, traceback) - - -# Our thread-based synchronization primitives... - - -class Lock: - """ - This is a standard lock. - - In the sync case `Lock` provides thread locking. - In the async case `AsyncLock` provides async locking. - """ - - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> Lock: - self._lock.acquire() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self._lock.release() - - -class ThreadLock: - """ - This is a threading-only lock for no-I/O contexts. - - In the sync case `ThreadLock` provides thread locking. - In the async case `AsyncThreadLock` is a no-op. 
- """ - - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> ThreadLock: - self._lock.acquire() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - self._lock.release() - - -class Event: - def __init__(self) -> None: - self._event = threading.Event() - - def set(self) -> None: - self._event.set() - - def wait(self, timeout: float | None = None) -> None: - if timeout == float("inf"): # pragma: no cover - timeout = None - if not self._event.wait(timeout=timeout): - raise PoolTimeout() # pragma: nocover - - -class Semaphore: - def __init__(self, bound: int) -> None: - self._semaphore = threading.Semaphore(value=bound) - - def acquire(self) -> None: - self._semaphore.acquire() - - def release(self) -> None: - self._semaphore.release() - - -class ShieldCancellation: - # Thread-synchronous codebases don't support cancellation semantics. - # We have this class because we need to mirror the async and sync - # cases within our package, but it's just a no-op. - def __enter__(self) -> ShieldCancellation: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - pass diff --git a/venv/lib/python3.12/site-packages/httpcore/_trace.py b/venv/lib/python3.12/site-packages/httpcore/_trace.py deleted file mode 100644 index 5f1cd7c4..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_trace.py +++ /dev/null @@ -1,107 +0,0 @@ -from __future__ import annotations - -import inspect -import logging -import types -import typing - -from ._models import Request - - -class Trace: - def __init__( - self, - name: str, - logger: logging.Logger, - request: Request | None = None, - kwargs: dict[str, typing.Any] | None = None, - ) -> None: - self.name = name - self.logger = logger - self.trace_extension = ( - None if request is None else request.extensions.get("trace") - ) - self.debug = self.logger.isEnabledFor(logging.DEBUG) - self.kwargs = kwargs or {} - self.return_value: typing.Any = None - self.should_trace = self.debug or self.trace_extension is not None - self.prefix = self.logger.name.split(".")[-1] - - def trace(self, name: str, info: dict[str, typing.Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - ret = self.trace_extension(prefix_and_name, info) - if inspect.iscoroutine(ret): # pragma: no cover - raise TypeError( - "If you are using a synchronous interface, " - "the callback of the `trace` extension should " - "be a normal function instead of an asynchronous function." 
- ) - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - def __enter__(self) -> Trace: - if self.should_trace: - info = self.kwargs - self.trace(f"{self.name}.started", info) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - self.trace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - self.trace(f"{self.name}.failed", info) - - async def atrace(self, name: str, info: dict[str, typing.Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - coro = self.trace_extension(prefix_and_name, info) - if not inspect.iscoroutine(coro): # pragma: no cover - raise TypeError( - "If you're using an asynchronous interface, " - "the callback of the `trace` extension should " - "be an asynchronous function rather than a normal function." - ) - await coro - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - async def __aenter__(self) -> Trace: - if self.should_trace: - info = self.kwargs - await self.atrace(f"{self.name}.started", info) - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: types.TracebackType | None = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - await self.atrace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - await self.atrace(f"{self.name}.failed", info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_utils.py b/venv/lib/python3.12/site-packages/httpcore/_utils.py deleted file mode 100644 index c44ff93c..00000000 --- a/venv/lib/python3.12/site-packages/httpcore/_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -from __future__ import annotations - -import select -import socket -import sys - - -def is_socket_readable(sock: socket.socket | None) -> bool: - """ - Return whether a socket, as identified by its file descriptor, is readable. - "A socket is readable" means that the read buffer isn't empty, i.e. that calling - .recv() on it would immediately return some data. - """ - # NOTE: we want to check for readability without actually attempting to read, because - # we don't want to block forever if it's not readable. - - # In the case that the socket no longer exists, or cannot return a file - # descriptor, we treat it as being readable, as if the next read operation - # on it is ready to return the terminating `b""`. - sock_fd = None if sock is None else sock.fileno() - if sock_fd is None or sock_fd < 0: # pragma: nocover - return True - - # The implementation below was stolen from: - # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 - # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 - - # Use select.select on Windows and when poll is unavailable, and select.poll - everywhere else. (E.g. when eventlet is in use.
See #327) - if ( - sys.platform == "win32" or getattr(select, "poll", None) is None - ): # pragma: nocover - rready, _, _ = select.select([sock_fd], [], [], 0) - return bool(rready) - p = select.poll() - p.register(sock_fd, select.POLLIN) - return bool(p.poll(0)) diff --git a/venv/lib/python3.12/site-packages/httpcore/py.typed b/venv/lib/python3.12/site-packages/httpcore/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA deleted file mode 100644 index b0d2b196..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/METADATA +++ /dev/null @@ -1,203 +0,0 @@ -Metadata-Version: 2.3 -Name: httpx -Version: 0.28.1 -Summary: The next generation HTTP client. -Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md -Project-URL: Documentation, https://www.python-httpx.org -Project-URL: Homepage, https://github.com/encode/httpx -Project-URL: Source, https://github.com/encode/httpx -Author-email: Tom Christie -License: BSD-3-Clause -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: anyio -Requires-Dist: certifi -Requires-Dist: httpcore==1.* -Requires-Dist: idna -Provides-Extra: brotli -Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' -Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' -Provides-Extra: cli -Requires-Dist: click==8.*; extra == 'cli' -Requires-Dist: pygments==2.*; extra == 'cli' -Requires-Dist: rich<14,>=10; extra == 'cli' -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Provides-Extra: zstd -Requires-Dist: zstandard>=0.18.0; extra == 'zstd' -Description-Content-Type: text/markdown - -
[image: HTTPX logo]

HTTPX - A next-generation HTTP client for Python.

[badges: Test Suite | Package version]
-HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync and async APIs**. - ---- - -Install HTTPX using pip: - -```shell -$ pip install httpx -``` - -Now, let's get started: - -```pycon ->>> import httpx ->>> r = httpx.get('https://www.example.org/') ->>> r -<Response [200 OK]> ->>> r.status_code -200 ->>> r.headers['content-type'] -'text/html; charset=UTF-8' ->>> r.text -'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>...' -``` - -Or, using the command-line client. - -```shell -$ pip install 'httpx[cli]' # The command line client is an optional dependency. -``` - -Which now allows us to use HTTPX directly from the command-line...
[screenshot: `httpx --help` output]

Sending a request...

[screenshot: `httpx http://httpbin.org/json` output]
-## Features - -HTTPX builds on the well-established usability of `requests`, and gives you: - -* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). -* An integrated command-line client. -* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). -* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). -* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport). -* Strict timeouts everywhere. -* Fully type annotated. -* 100% test coverage. - -Plus all the standard features of `requests`... - -* International Domains and URLs -* Keep-Alive & Connection Pooling -* Sessions with Cookie Persistence -* Browser-style SSL Verification -* Basic/Digest Authentication -* Elegant Key/Value Cookies -* Automatic Decompression -* Automatic Content Decoding -* Unicode Response Bodies -* Multipart File Uploads -* HTTP(S) Proxy Support -* Connection Timeouts -* Streaming Downloads -* .netrc Support -* Chunked Requests - -## Installation - -Install with pip: - -```shell -$ pip install httpx -``` - -Or, to include the optional HTTP/2 support, use: - -```shell -$ pip install httpx[http2] -``` - -HTTPX requires Python 3.8+. - -## Documentation - -Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). - -For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). - -For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. - -The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. - -To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). - -## Contribute - -If you want to contribute to HTTPX, check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. - -## Dependencies - -The HTTPX project relies on these excellent libraries: - -* `httpcore` - The underlying transport implementation for `httpx`. - * `h11` - HTTP/1.1 support. -* `certifi` - SSL certificates. -* `idna` - Internationalized domain name support. -* `sniffio` - Async library autodetection. - -As well as these optional installs: - -* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* -* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* -* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* -* `click` - Command line client support. *(Optional, with `httpx[cli]`)* -* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* -* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)* - -A huge amount of credit is due to `requests` for the API layout that -much of this work follows, as well as to `urllib3` for plenty of design -inspiration around the lower-level networking details. - ---
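The async support listed above mirrors the synchronous API. A minimal sketch using the documented `AsyncClient` (run under an async-aware REPL such as `python -m asyncio`, which supports top-level `await`):

```pycon
>>> import httpx
>>> async with httpx.AsyncClient() as client:
...     r = await client.get('https://www.example.org/')
...
>>> r
<Response [200 OK]>
```

Opening the client as a context manager ensures its connection pool is cleanly closed when the block exits.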

HTTPX is BSD licensed code.
Designed & crafted with care.

— 🦋 —

- -## Release Information - -### Fixed - -* Reintroduced supposedly-private `URLTypes` shortcut. (#2673) - - ---- - -[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD deleted file mode 100644 index d4f86fd1..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/RECORD +++ /dev/null @@ -1,55 +0,0 @@ -../../../bin/httpx,sha256=OhQuxXARU_U3ua_JtzuPju3EjqiLcAtShw6EXHvmf6Y,232 -httpx-0.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpx-0.28.1.dist-info/METADATA,sha256=_rubD48-gNV8gZnDBPNcQzboWB0dGNeYPJJ2a4J5OyU,7052 -httpx-0.28.1.dist-info/RECORD,, -httpx-0.28.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpx-0.28.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87 -httpx-0.28.1.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 -httpx-0.28.1.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 -httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171 -httpx/__pycache__/__init__.cpython-312.pyc,, -httpx/__pycache__/__version__.cpython-312.pyc,, -httpx/__pycache__/_api.cpython-312.pyc,, -httpx/__pycache__/_auth.cpython-312.pyc,, -httpx/__pycache__/_client.cpython-312.pyc,, -httpx/__pycache__/_config.cpython-312.pyc,, -httpx/__pycache__/_content.cpython-312.pyc,, -httpx/__pycache__/_decoders.cpython-312.pyc,, -httpx/__pycache__/_exceptions.cpython-312.pyc,, -httpx/__pycache__/_main.cpython-312.pyc,, -httpx/__pycache__/_models.cpython-312.pyc,, -httpx/__pycache__/_multipart.cpython-312.pyc,, -httpx/__pycache__/_status_codes.cpython-312.pyc,, -httpx/__pycache__/_types.cpython-312.pyc,, -httpx/__pycache__/_urlparse.cpython-312.pyc,, -httpx/__pycache__/_urls.cpython-312.pyc,, -httpx/__pycache__/_utils.cpython-312.pyc,, -httpx/__version__.py,sha256=LoUyYeOXTieGzuP_64UL0wxdtxjuu_QbOvE7NOg-IqU,108 -httpx/_api.py,sha256=r_Zgs4jIpcPJLqK5dbbSayqo_iVMKFaxZCd-oOHxLEs,11743 -httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891 -httpx/_client.py,sha256=xD-UG67-WMkeltAAOeGGj-cZ2RRTAm19sWRxlFY7_40,65714 -httpx/_config.py,sha256=pPp2U-wicfcKsF-KYRE1LYdt3e6ERGeIoXZ8Gjo3LWc,8547 -httpx/_content.py,sha256=LGGzrJTR3OvN4Mb1GVVNLXkXJH-6oKlwAttO9p5w_yg,8161 -httpx/_decoders.py,sha256=p0dX8I0NEHexs3UGp4SsZutiMhsXrrWl6-GnqVb0iKM,12041 -httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527 -httpx/_main.py,sha256=Cg9GMabiTT_swaDfUgIRitSwxLRMSwUDOm7LdSGqlA4,15626 -httpx/_models.py,sha256=4__Guyv1gLxuZChwim8kfQNiIOcJ9acreFOSurvZfms,44700 -httpx/_multipart.py,sha256=KOHEZZl6oohg9mPaKyyu345qq1rJLg35TUG3YAzXB3Y,9843 -httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639 -httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275 -httpx/_transports/__pycache__/__init__.cpython-312.pyc,, -httpx/_transports/__pycache__/asgi.cpython-312.pyc,, -httpx/_transports/__pycache__/base.cpython-312.pyc,, -httpx/_transports/__pycache__/default.cpython-312.pyc,, -httpx/_transports/__pycache__/mock.cpython-312.pyc,, -httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, -httpx/_transports/asgi.py,sha256=HRfiDYMPt4wQH2gFgHZg4c-i3sblo6bL5GTqcET-xz8,5501 -httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523 
-httpx/_transports/default.py,sha256=AzeaRUyVwCccTyyNJexDf0n1dFfzzydpdIQgvw7PLnk,13983 -httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232 -httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825 -httpx/_types.py,sha256=Jyh41GQq7AOev8IOWKDAg7zCbvHAfufmW5g_PiTtErY,2965 -httpx/_urlparse.py,sha256=ZAmH47ONfkxrrj-PPYhGeiHjb6AjKCS-ANWIN4OL_KY,18546 -httpx/_urls.py,sha256=dX99VR1DSOHpgo9Aq7PzYO4FKdxqKjwyNp8grf8dHN0,21550 -httpx/_utils.py,sha256=_TVeqAKvxJkKHdz7dFeb4s0LZqQXgeFkXSgfiHBK_1o,8285 -httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/REQUESTED b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/REQUESTED deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL deleted file mode 100644 index 21aaa729..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.26.3 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt deleted file mode 100644 index 8ae96007..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -httpx = httpx:main diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md deleted file mode 100644 index ab79d16a..00000000 --- a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,12 +0,0 @@ -Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
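The `httpx/__init__.py` deleted in the next hunk re-exports the public API from private submodules, then rewrites each exported symbol's `__module__` so it reports `httpx` rather than, say, `httpx._models`. A minimal self-contained sketch of that pattern (`mypkg` and the inline `Response` class are illustrative stand-ins, not httpx internals):

```python
# Sketch of the __init__.py re-export pattern shown in the hunk below.
# In a real package, `Response` would be imported from a private
# submodule, e.g. `from ._models import Response`.
class Response:
    pass

__all__ = ["Response"]

# Rewrite __module__ so repr() and introspection report the public
# package name rather than the private submodule defining the symbol.
_locals = locals()
for _name in __all__:
    if not _name.startswith("__"):
        setattr(_locals[_name], "__module__", "mypkg")

print(Response.__module__)  # prints "mypkg"
```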
diff --git a/venv/lib/python3.12/site-packages/httpx/__init__.py b/venv/lib/python3.12/site-packages/httpx/__init__.py deleted file mode 100644 index e9addde0..00000000 --- a/venv/lib/python3.12/site-packages/httpx/__init__.py +++ /dev/null @@ -1,105 +0,0 @@ -from .__version__ import __description__, __title__, __version__ -from ._api import * -from ._auth import * -from ._client import * -from ._config import * -from ._content import * -from ._exceptions import * -from ._models import * -from ._status_codes import * -from ._transports import * -from ._types import * -from ._urls import * - -try: - from ._main import main -except ImportError: # pragma: no cover - - def main() -> None: # type: ignore - import sys - - print( - "The httpx command line client could not run because the required " - "dependencies were not installed.\nMake sure you've installed " - "everything with: pip install 'httpx[cli]'" - ) - sys.exit(1) - - -__all__ = [ - "__description__", - "__title__", - "__version__", - "ASGITransport", - "AsyncBaseTransport", - "AsyncByteStream", - "AsyncClient", - "AsyncHTTPTransport", - "Auth", - "BaseTransport", - "BasicAuth", - "ByteStream", - "Client", - "CloseError", - "codes", - "ConnectError", - "ConnectTimeout", - "CookieConflict", - "Cookies", - "create_ssl_context", - "DecodingError", - "delete", - "DigestAuth", - "get", - "head", - "Headers", - "HTTPError", - "HTTPStatusError", - "HTTPTransport", - "InvalidURL", - "Limits", - "LocalProtocolError", - "main", - "MockTransport", - "NetRCAuth", - "NetworkError", - "options", - "patch", - "PoolTimeout", - "post", - "ProtocolError", - "Proxy", - "ProxyError", - "put", - "QueryParams", - "ReadError", - "ReadTimeout", - "RemoteProtocolError", - "request", - "Request", - "RequestError", - "RequestNotRead", - "Response", - "ResponseNotRead", - "stream", - "StreamClosed", - "StreamConsumed", - "StreamError", - "SyncByteStream", - "Timeout", - "TimeoutException", - "TooManyRedirects", - "TransportError", - "UnsupportedProtocol", - "URL", - "USE_CLIENT_DEFAULT", - "WriteError", - "WriteTimeout", - "WSGITransport", -] - - -__locals = locals() -for __name in __all__: - if not __name.startswith("__"): - setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0d5508a5..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc deleted file mode 100644 index 87acf17f..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc deleted file mode 100644 index d772d463..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc deleted file mode 100644 index 84531903..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc and 
/dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc deleted file mode 100644 index f27829c0..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc deleted file mode 100644 index 5dcf9921..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc deleted file mode 100644 index c252f6f9..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc deleted file mode 100644 index e8bebd27..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc deleted file mode 100644 index 0f265154..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc deleted file mode 100644 index 064e2e59..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc deleted file mode 100644 index 2364bb45..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc deleted file mode 100644 index b8e3c056..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc deleted file mode 100644 index 65afa03a..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc deleted file mode 100644 index bf7700f9..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc deleted file mode 100644 index 
aab8fcc4..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc deleted file mode 100644 index 511265ac..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc deleted file mode 100644 index 21717dc9..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/__version__.py b/venv/lib/python3.12/site-packages/httpx/__version__.py deleted file mode 100644 index 801bfacf..00000000 --- a/venv/lib/python3.12/site-packages/httpx/__version__.py +++ /dev/null @@ -1,3 +0,0 @@ -__title__ = "httpx" -__description__ = "A next generation HTTP client, for Python 3." -__version__ = "0.28.1" diff --git a/venv/lib/python3.12/site-packages/httpx/_api.py b/venv/lib/python3.12/site-packages/httpx/_api.py deleted file mode 100644 index c3cda1ec..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_api.py +++ /dev/null @@ -1,438 +0,0 @@ -from __future__ import annotations - -import typing -from contextlib import contextmanager - -from ._client import Client -from ._config import DEFAULT_TIMEOUT_CONFIG -from ._models import Response -from ._types import ( - AuthTypes, - CookieTypes, - HeaderTypes, - ProxyTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestFiles, - TimeoutTypes, -) -from ._urls import URL - -if typing.TYPE_CHECKING: - import ssl # pragma: no cover - - -__all__ = [ - "delete", - "get", - "head", - "options", - "patch", - "post", - "put", - "request", - "stream", -] - - -def request( - method: str, - url: URL | str, - *, - params: QueryParamTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - trust_env: bool = True, -) -> Response: - """ - Sends an HTTP request. - - **Parameters:** - - * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, - `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. - * **url** - URL for the new `Request` object. - * **params** - *(optional)* Query parameters to include in the URL, as a - string, dictionary, or sequence of two-tuples. - * **content** - *(optional)* Binary content to include in the body of the - request, as bytes or a byte iterator. - * **data** - *(optional)* Form data to include in the body of the request, - as a dictionary. - * **files** - *(optional)* A dictionary of upload files to include in the - body of the request. - * **json** - *(optional)* A JSON serializable object to include in the body - of the request. - * **headers** - *(optional)* Dictionary of HTTP headers to include in the - request. - * **cookies** - *(optional)* Dictionary of Cookie items to include in the - request. - * **auth** - *(optional)* An authentication class to use when sending the - request. 
- * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **timeout** - *(optional)* The timeout configuration to use when sending - the request. - * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. - * **verify** - *(optional)* Either `True` to use an SSL context with the - default CA bundle, `False` to disable verification, or an instance of - `ssl.SSLContext` to use a custom context. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - - **Returns:** `Response` - - Usage: - - ``` - >>> import httpx - >>> response = httpx.request('GET', 'https://httpbin.org/get') - >>> response - <Response [200 OK]> - ``` - """ - with Client( - cookies=cookies, - proxy=proxy, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - return client.request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) - - -@contextmanager -def stream( - method: str, - url: URL | str, - *, - params: QueryParamTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - trust_env: bool = True, -) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - with Client( - cookies=cookies, - proxy=proxy, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - with client.stream( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) as response: - yield response - - -def get( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `GET` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `GET` requests should not include a request body. - """ - return request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def options( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends an `OPTIONS` request. - - **Parameters**: See `httpx.request`.
- - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `OPTIONS` requests should not include a request body. - """ - return request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def head( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `HEAD` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `HEAD` requests should not include a request body. - """ - return request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def post( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def put( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PUT` request. - - **Parameters**: See `httpx.request`. 
- """ - return request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def patch( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - verify: ssl.SSLContext | str | bool = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def delete( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - follow_redirects: bool = False, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - verify: ssl.SSLContext | str | bool = True, - trust_env: bool = True, -) -> Response: - """ - Sends a `DELETE` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `DELETE` requests should not include a request body. - """ - return request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - follow_redirects=follow_redirects, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) diff --git a/venv/lib/python3.12/site-packages/httpx/_auth.py b/venv/lib/python3.12/site-packages/httpx/_auth.py deleted file mode 100644 index b03971ab..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_auth.py +++ /dev/null @@ -1,348 +0,0 @@ -from __future__ import annotations - -import hashlib -import os -import re -import time -import typing -from base64 import b64encode -from urllib.request import parse_http_list - -from ._exceptions import ProtocolError -from ._models import Cookies, Request, Response -from ._utils import to_bytes, to_str, unquote - -if typing.TYPE_CHECKING: # pragma: no cover - from hashlib import _Hash - - -__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"] - - -class Auth: - """ - Base class for all authentication schemes. - - To implement a custom authentication scheme, subclass `Auth` and override - the `.auth_flow()` method. - - If the authentication scheme does I/O such as disk access or network calls, or uses - synchronization primitives such as locks, you should override `.sync_auth_flow()` - and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized - implementations that will be used by `Client` and `AsyncClient` respectively. - """ - - requires_request_body = False - requires_response_body = False - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow. 
- - To dispatch a request, `yield` it: - - ``` - yield request - ``` - - The client will `.send()` the response back into the flow generator. You can - access it like so: - - ``` - response = yield request - ``` - - A `return` (or reaching the end of the generator) will result in the - client returning the last response obtained from the server. - - You can dispatch as many requests as is necessary. - """ - yield request - - def sync_auth_flow( - self, request: Request - ) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow synchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - request.read() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - response.read() - - try: - request = flow.send(response) - except StopIteration: - break - - async def async_auth_flow( - self, request: Request - ) -> typing.AsyncGenerator[Request, Response]: - """ - Execute the authentication flow asynchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - await request.aread() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - await response.aread() - - try: - request = flow.send(response) - except StopIteration: - break - - -class FunctionAuth(Auth): - """ - Allows the 'auth' argument to be passed as a simple callable function, - that takes the request, and returns a new, modified request. - """ - - def __init__(self, func: typing.Callable[[Request], Request]) -> None: - self._func = func - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - yield self._func(request) - - -class BasicAuth(Auth): - """ - Allows the 'auth' argument to be passed as a (username, password) pair, - and uses HTTP Basic authentication. - """ - - def __init__(self, username: str | bytes, password: str | bytes) -> None: - self._auth_header = self._build_auth_header(username, password) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - request.headers["Authorization"] = self._auth_header - yield request - - def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class NetRCAuth(Auth): - """ - Use a 'netrc' file to lookup basic auth credentials based on the url host. - """ - - def __init__(self, file: str | None = None) -> None: - # Lazily import 'netrc'. - # There's no need for us to load this module unless 'NetRCAuth' is being used. - import netrc - - self._netrc_info = netrc.netrc(file) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - auth_info = self._netrc_info.authenticators(request.url.host) - if auth_info is None or not auth_info[2]: - # The netrc file did not have authentication credentials for this host. - yield request - else: - # Build a basic auth header with credentials from the netrc file. 
- request.headers["Authorization"] = self._build_auth_header( - username=auth_info[0], password=auth_info[2] - ) - yield request - - def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class DigestAuth(Auth): - _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { - "MD5": hashlib.md5, - "MD5-SESS": hashlib.md5, - "SHA": hashlib.sha1, - "SHA-SESS": hashlib.sha1, - "SHA-256": hashlib.sha256, - "SHA-256-SESS": hashlib.sha256, - "SHA-512": hashlib.sha512, - "SHA-512-SESS": hashlib.sha512, - } - - def __init__(self, username: str | bytes, password: str | bytes) -> None: - self._username = to_bytes(username) - self._password = to_bytes(password) - self._last_challenge: _DigestAuthChallenge | None = None - self._nonce_count = 1 - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - if self._last_challenge: - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - - response = yield request - - if response.status_code != 401 or "www-authenticate" not in response.headers: - # If the response is not a 401 then we don't - # need to build an authenticated request. - return - - for auth_header in response.headers.get_list("www-authenticate"): - if auth_header.lower().startswith("digest "): - break - else: - # If the response does not include a 'WWW-Authenticate: Digest ...' - # header, then we don't need to build an authenticated request. - return - - self._last_challenge = self._parse_challenge(request, response, auth_header) - self._nonce_count = 1 - - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - if response.cookies: - Cookies(response.cookies).set_cookie_header(request=request) - yield request - - def _parse_challenge( - self, request: Request, response: Response, auth_header: str - ) -> _DigestAuthChallenge: - """ - Returns a challenge from a Digest WWW-Authenticate header. - These take the form of: - `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` - """ - scheme, _, fields = auth_header.partition(" ") - - # This method should only ever have been called with a Digest auth header. 
- assert scheme.lower() == "digest" - - header_dict: dict[str, str] = {} - for field in parse_http_list(fields): - key, value = field.strip().split("=", 1) - header_dict[key] = unquote(value) - - try: - realm = header_dict["realm"].encode() - nonce = header_dict["nonce"].encode() - algorithm = header_dict.get("algorithm", "MD5") - opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None - qop = header_dict["qop"].encode() if "qop" in header_dict else None - return _DigestAuthChallenge( - realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop - ) - except KeyError as exc: - message = "Malformed Digest WWW-Authenticate header" - raise ProtocolError(message, request=request) from exc - - def _build_auth_header( - self, request: Request, challenge: _DigestAuthChallenge - ) -> str: - hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] - - def digest(data: bytes) -> bytes: - return hash_func(data).hexdigest().encode() - - A1 = b":".join((self._username, challenge.realm, self._password)) - - path = request.url.raw_path - A2 = b":".join((request.method.encode(), path)) - # TODO: implement auth-int - HA2 = digest(A2) - - nc_value = b"%08x" % self._nonce_count - cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) - self._nonce_count += 1 - - HA1 = digest(A1) - if challenge.algorithm.lower().endswith("-sess"): - HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) - - qop = self._resolve_qop(challenge.qop, request=request) - if qop is None: - # Following RFC 2069 - digest_data = [HA1, challenge.nonce, HA2] - else: - # Following RFC 2617/7616 - digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] - - format_args = { - "username": self._username, - "realm": challenge.realm, - "nonce": challenge.nonce, - "uri": path, - "response": digest(b":".join(digest_data)), - "algorithm": challenge.algorithm.encode(), - } - if challenge.opaque: - format_args["opaque"] = challenge.opaque - if qop: - format_args["qop"] = b"auth" - format_args["nc"] = nc_value - format_args["cnonce"] = cnonce - - return "Digest " + self._get_header_value(format_args) - - def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: - s = str(nonce_count).encode() - s += nonce - s += time.ctime().encode() - s += os.urandom(8) - - return hashlib.sha1(s).hexdigest()[:16].encode() - - def _get_header_value(self, header_fields: dict[str, bytes]) -> str: - NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") - QUOTED_TEMPLATE = '{}="{}"' - NON_QUOTED_TEMPLATE = "{}={}" - - header_value = "" - for i, (field, value) in enumerate(header_fields.items()): - if i > 0: - header_value += ", " - template = ( - QUOTED_TEMPLATE - if field not in NON_QUOTED_FIELDS - else NON_QUOTED_TEMPLATE - ) - header_value += template.format(field, to_str(value)) - - return header_value - - def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: - if qop is None: - return None - qops = re.split(b", ?", qop) - if b"auth" in qops: - return b"auth" - - if qops == [b"auth-int"]: - raise NotImplementedError("Digest auth-int support is not yet implemented") - - message = f'Unexpected qop value "{qop!r}" in digest auth' - raise ProtocolError(message, request=request) - - -class _DigestAuthChallenge(typing.NamedTuple): - realm: bytes - nonce: bytes - algorithm: str - opaque: bytes | None - qop: bytes | None diff --git a/venv/lib/python3.12/site-packages/httpx/_client.py b/venv/lib/python3.12/site-packages/httpx/_client.py deleted file mode 100644 index 
2249231f..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_client.py +++ /dev/null @@ -1,2019 +0,0 @@ -from __future__ import annotations - -import datetime -import enum -import logging -import time -import typing -import warnings -from contextlib import asynccontextmanager, contextmanager -from types import TracebackType - -from .__version__ import __version__ -from ._auth import Auth, BasicAuth, FunctionAuth -from ._config import ( - DEFAULT_LIMITS, - DEFAULT_MAX_REDIRECTS, - DEFAULT_TIMEOUT_CONFIG, - Limits, - Proxy, - Timeout, -) -from ._decoders import SUPPORTED_DECODERS -from ._exceptions import ( - InvalidURL, - RemoteProtocolError, - TooManyRedirects, - request_context, -) -from ._models import Cookies, Headers, Request, Response -from ._status_codes import codes -from ._transports.base import AsyncBaseTransport, BaseTransport -from ._transports.default import AsyncHTTPTransport, HTTPTransport -from ._types import ( - AsyncByteStream, - AuthTypes, - CertTypes, - CookieTypes, - HeaderTypes, - ProxyTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - SyncByteStream, - TimeoutTypes, -) -from ._urls import URL, QueryParams -from ._utils import URLPattern, get_environment_proxies - -if typing.TYPE_CHECKING: - import ssl # pragma: no cover - -__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"] - -# The type annotation for @classmethod and context managers here follows PEP 484 -# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods -T = typing.TypeVar("T", bound="Client") -U = typing.TypeVar("U", bound="AsyncClient") - - -def _is_https_redirect(url: URL, location: URL) -> bool: - """ - Return 'True' if 'location' is a HTTPS upgrade of 'url' - """ - if url.host != location.host: - return False - - return ( - url.scheme == "http" - and _port_or_default(url) == 80 - and location.scheme == "https" - and _port_or_default(location) == 443 - ) - - -def _port_or_default(url: URL) -> int | None: - if url.port is not None: - return url.port - return {"http": 80, "https": 443}.get(url.scheme) - - -def _same_origin(url: URL, other: URL) -> bool: - """ - Return 'True' if the given URLs share the same origin. - """ - return ( - url.scheme == other.scheme - and url.host == other.host - and _port_or_default(url) == _port_or_default(other) - ) - - -class UseClientDefault: - """ - For some parameters such as `auth=...` and `timeout=...` we need to be able - to indicate the default "unset" state, in a way that is distinctly different - to using `None`. - - The default "unset" state indicates that whatever default is set on the - client should be used. This is different to setting `None`, which - explicitly disables the parameter, possibly overriding a client default. - - For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. - Omitting the `timeout` parameter will send a request using whatever default - timeout has been configured on the client. Including `timeout=None` will - ensure no timeout is used. - - Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, - but it is used internally when a parameter is not included. 
- """ - - -USE_CLIENT_DEFAULT = UseClientDefault() - - -logger = logging.getLogger("httpx") - -USER_AGENT = f"python-httpx/{__version__}" -ACCEPT_ENCODING = ", ".join( - [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] -) - - -class ClientState(enum.Enum): - # UNOPENED: - # The client has been instantiated, but has not been used to send a request, - # or been opened by entering the context of a `with` block. - UNOPENED = 1 - # OPENED: - # The client has either sent a request, or is within a `with` block. - OPENED = 2 - # CLOSED: - # The client has either exited the `with` block, or `close()` has - # been called explicitly. - CLOSED = 3 - - -class BoundSyncStream(SyncByteStream): - """ - A byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. - """ - - def __init__( - self, stream: SyncByteStream, response: Response, start: float - ) -> None: - self._stream = stream - self._response = response - self._start = start - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self._stream: - yield chunk - - def close(self) -> None: - elapsed = time.perf_counter() - self._start - self._response.elapsed = datetime.timedelta(seconds=elapsed) - self._stream.close() - - -class BoundAsyncStream(AsyncByteStream): - """ - An async byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. - """ - - def __init__( - self, stream: AsyncByteStream, response: Response, start: float - ) -> None: - self._stream = stream - self._response = response - self._start = start - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - async for chunk in self._stream: - yield chunk - - async def aclose(self) -> None: - elapsed = time.perf_counter() - self._start - self._response.elapsed = datetime.timedelta(seconds=elapsed) - await self._stream.aclose() - - -EventHook = typing.Callable[..., typing.Any] - - -class BaseClient: - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - trust_env: bool = True, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - event_hooks = {} if event_hooks is None else event_hooks - - self._base_url = self._enforce_trailing_slash(URL(base_url)) - - self._auth = self._build_auth(auth) - self._params = QueryParams(params) - self.headers = Headers(headers) - self._cookies = Cookies(cookies) - self._timeout = Timeout(timeout) - self.follow_redirects = follow_redirects - self.max_redirects = max_redirects - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - self._trust_env = trust_env - self._default_encoding = default_encoding - self._state = ClientState.UNOPENED - - @property - def is_closed(self) -> bool: - """ - Check if the client being closed - """ - return self._state == ClientState.CLOSED - - @property - def trust_env(self) -> bool: - return self._trust_env - - def _enforce_trailing_slash(self, url: URL) -> URL: - if url.raw_path.endswith(b"/"): - return url - return url.copy_with(raw_path=url.raw_path + b"/") - - def _get_proxy_map( - self, proxy: 
ProxyTypes | None, allow_env_proxies: bool - ) -> dict[str, Proxy | None]: - if proxy is None: - if allow_env_proxies: - return { - key: None if url is None else Proxy(url=url) - for key, url in get_environment_proxies().items() - } - return {} - else: - proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy - return {"all://": proxy} - - @property - def timeout(self) -> Timeout: - return self._timeout - - @timeout.setter - def timeout(self, timeout: TimeoutTypes) -> None: - self._timeout = Timeout(timeout) - - @property - def event_hooks(self) -> dict[str, list[EventHook]]: - return self._event_hooks - - @event_hooks.setter - def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - - @property - def auth(self) -> Auth | None: - """ - Authentication class used when none is passed at the request-level. - - See also [Authentication][0]. - - [0]: /quickstart/#authentication - """ - return self._auth - - @auth.setter - def auth(self, auth: AuthTypes) -> None: - self._auth = self._build_auth(auth) - - @property - def base_url(self) -> URL: - """ - Base URL to use when sending requests with relative URLs. - """ - return self._base_url - - @base_url.setter - def base_url(self, url: URL | str) -> None: - self._base_url = self._enforce_trailing_slash(URL(url)) - - @property - def headers(self) -> Headers: - """ - HTTP headers to include when sending requests. - """ - return self._headers - - @headers.setter - def headers(self, headers: HeaderTypes) -> None: - client_headers = Headers( - { - b"Accept": b"*/*", - b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), - b"Connection": b"keep-alive", - b"User-Agent": USER_AGENT.encode("ascii"), - } - ) - client_headers.update(headers) - self._headers = client_headers - - @property - def cookies(self) -> Cookies: - """ - Cookie values to include when sending requests. - """ - return self._cookies - - @cookies.setter - def cookies(self, cookies: CookieTypes) -> None: - self._cookies = Cookies(cookies) - - @property - def params(self) -> QueryParams: - """ - Query parameters to include in the URL when sending requests. - """ - return self._params - - @params.setter - def params(self, params: QueryParamTypes) -> None: - self._params = QueryParams(params) - - def build_request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Request: - """ - Build and return a request instance. - - * The `params`, `headers` and `cookies` arguments - are merged with any values set on the client. - * The `url` argument is merged with any `base_url` set on the client. 
- - See also: [Request instances][0] - - [0]: /advanced/clients/#request-instances - """ - url = self._merge_url(url) - headers = self._merge_headers(headers) - cookies = self._merge_cookies(cookies) - params = self._merge_queryparams(params) - extensions = {} if extensions is None else extensions - if "timeout" not in extensions: - timeout = ( - self.timeout - if isinstance(timeout, UseClientDefault) - else Timeout(timeout) - ) - extensions = dict(**extensions, timeout=timeout.as_dict()) - return Request( - method, - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - extensions=extensions, - ) - - def _merge_url(self, url: URL | str) -> URL: - """ - Merge a URL argument together with any 'base_url' on the client, - to create the URL used for the outgoing request. - """ - merge_url = URL(url) - if merge_url.is_relative_url: - # To merge URLs we always append to the base URL. To get this - # behaviour correct we always ensure the base URL ends in a '/' - # separator, and strip any leading '/' from the merge URL. - # - # So, eg... - # - # >>> client = Client(base_url="https://www.example.com/subpath") - # >>> client.base_url - # URL('https://www.example.com/subpath/') - # >>> client.build_request("GET", "/path").url - # URL('https://www.example.com/subpath/path') - merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") - return self.base_url.copy_with(raw_path=merge_raw_path) - return merge_url - - def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: - """ - Merge a cookies argument together with any cookies on the client, - to create the cookies used for the outgoing request. - """ - if cookies or self.cookies: - merged_cookies = Cookies(self.cookies) - merged_cookies.update(cookies) - return merged_cookies - return cookies - - def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: - """ - Merge a headers argument together with any headers on the client, - to create the headers used for the outgoing request. - """ - merged_headers = Headers(self.headers) - merged_headers.update(headers) - return merged_headers - - def _merge_queryparams( - self, params: QueryParamTypes | None = None - ) -> QueryParamTypes | None: - """ - Merge a queryparams argument together with any queryparams on the client, - to create the queryparams used for the outgoing request. 
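As a quick illustration of the query-parameter merging described here (a sketch with a made-up URL): client-level parameters act as defaults, and per-request values win on conflict.

```python
import httpx

client = httpx.Client(params={"lang": "en", "page": 1})
request = client.build_request(
    "GET", "https://example.org/search", params={"page": 2}
)
# lang=en is carried over from the client; page=2 overrides the client default.
print(request.url)
```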
- """ - if params or self.params: - merged_queryparams = QueryParams(self.params) - return merged_queryparams.merge(params) - return params - - def _build_auth(self, auth: AuthTypes | None) -> Auth | None: - if auth is None: - return None - elif isinstance(auth, tuple): - return BasicAuth(username=auth[0], password=auth[1]) - elif isinstance(auth, Auth): - return auth - elif callable(auth): - return FunctionAuth(func=auth) - else: - raise TypeError(f'Invalid "auth" argument: {auth!r}') - - def _build_request_auth( - self, - request: Request, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - ) -> Auth: - auth = ( - self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) - ) - - if auth is not None: - return auth - - username, password = request.url.username, request.url.password - if username or password: - return BasicAuth(username=username, password=password) - - return Auth() - - def _build_redirect_request(self, request: Request, response: Response) -> Request: - """ - Given a request and a redirect response, return a new request that - should be used to effect the redirect. - """ - method = self._redirect_method(request, response) - url = self._redirect_url(request, response) - headers = self._redirect_headers(request, url, method) - stream = self._redirect_stream(request, method) - cookies = Cookies(self.cookies) - return Request( - method=method, - url=url, - headers=headers, - cookies=cookies, - stream=stream, - extensions=request.extensions, - ) - - def _redirect_method(self, request: Request, response: Response) -> str: - """ - When being redirected we may want to change the method of the request - based on certain specs or browser behavior. - """ - method = request.method - - # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.SEE_OTHER and method != "HEAD": - method = "GET" - - # Do what the browsers do, despite standards... - # Turn 302s into GETs. - if response.status_code == codes.FOUND and method != "HEAD": - method = "GET" - - # If a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in 'requests' issue 1704. - if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": - method = "GET" - - return method - - def _redirect_url(self, request: Request, response: Response) -> URL: - """ - Return the URL for the redirect to follow. - """ - location = response.headers["Location"] - - try: - url = URL(location) - except InvalidURL as exc: - raise RemoteProtocolError( - f"Invalid URL in location header: {exc}.", request=request - ) from None - - # Handle malformed 'Location' headers that are "absolute" form, have no host. - # See: https://github.com/encode/httpx/issues/771 - if url.scheme and not url.host: - url = url.copy_with(host=request.url.host) - - # Facilitate relative 'Location' headers, as allowed by RFC 7231. - # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') - if url.is_relative_url: - url = request.url.join(url) - - # Attach previous fragment if needed (RFC 7231 7.1.2) - if request.url.fragment and not url.fragment: - url = url.copy_with(fragment=request.url.fragment) - - return url - - def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: - """ - Return the headers that should be used for the redirect request. 
- """ - headers = Headers(request.headers) - - if not _same_origin(url, request.url): - if not _is_https_redirect(request.url, url): - # Strip Authorization headers when responses are redirected - # away from the origin. (Except for direct HTTP to HTTPS redirects.) - headers.pop("Authorization", None) - - # Update the Host header. - headers["Host"] = url.netloc.decode("ascii") - - if method != request.method and method == "GET": - # If we've switch to a 'GET' request, then strip any headers which - # are only relevant to the request body. - headers.pop("Content-Length", None) - headers.pop("Transfer-Encoding", None) - - # We should use the client cookie store to determine any cookie header, - # rather than whatever was on the original outgoing request. - headers.pop("Cookie", None) - - return headers - - def _redirect_stream( - self, request: Request, method: str - ) -> SyncByteStream | AsyncByteStream | None: - """ - Return the body that should be used for the redirect request. - """ - if method != request.method and method == "GET": - return None - - return request.stream - - def _set_timeout(self, request: Request) -> None: - if "timeout" not in request.extensions: - timeout = ( - self.timeout - if isinstance(self.timeout, UseClientDefault) - else Timeout(self.timeout) - ) - request.extensions = dict(**request.extensions, timeout=timeout.as_dict()) - - -class Client(BaseClient): - """ - An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. - - It can be shared between threads. - - Usage: - - ```python - >>> client = httpx.Client() - >>> response = client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or sequence of two-tuples. - * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* Either `True` to use an SSL context with the - default CA bundle, `False` to disable verification, or an instance of - `ssl.SSLContext` to use a custom context. - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. - * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". 
- """ - - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - proxy: ProxyTypes | None = None, - mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - transport: BaseTransport | None = None, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." - ) from None - - allow_env_proxies = trust_env and transport is None - proxy_map = self._get_proxy_map(proxy, allow_env_proxies) - - self._transport = self._init_transport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - transport=transport, - ) - self._mounts: dict[URLPattern, BaseTransport | None] = { - URLPattern(key): None - if proxy is None - else self._init_proxy_transport( - proxy, - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - ) - for key, proxy in proxy_map.items() - } - if mounts is not None: - self._mounts.update( - {URLPattern(key): transport for key, transport in mounts.items()} - ) - - self._mounts = dict(sorted(self._mounts.items())) - - def _init_transport( - self, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - transport: BaseTransport | None = None, - ) -> BaseTransport: - if transport is not None: - return transport - - return HTTPTransport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - ) - - def _init_proxy_transport( - self, - proxy: Proxy, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - ) -> BaseTransport: - return HTTPTransport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - proxy=proxy, - ) - - def _transport_for_url(self, url: URL) -> BaseTransport: - """ - Returns the transport instance that should be used for a given URL. - This will either be the standard connection pool, or a proxy. 
- """ - for pattern, transport in self._mounts.items(): - if pattern.matches(url): - return self._transport if transport is None else transport - - return self._transport - - def request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Build and send a request. - - Equivalent to: - - ```python - request = client.build_request(...) - response = client.send(request, ...) - ``` - - See `Client.build_request()`, `Client.send()` and - [Merging of configuration][0] for how the various parameters - are merged with client-level configuration. - - [0]: /advanced/clients/#merging-of-configuration - """ - if cookies is not None: - message = ( - "Setting per-request cookies=<...> is being deprecated, because " - "the expected behaviour on cookie persistence is ambiguous. Set " - "cookies directly on the client instance instead." - ) - warnings.warn(message, DeprecationWarning, stacklevel=2) - - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return self.send(request, auth=auth, follow_redirects=follow_redirects) - - @contextmanager - def stream( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - response.close() - - def send( - self, - request: Request, - *, - stream: bool = False, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. - - Typically you'll want to build one with `Client.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. 
- - See also: [Request instances][0] - - [0]: /advanced/clients/#request-instances - """ - if self._state == ClientState.CLOSED: - raise RuntimeError("Cannot send a request, as the client has been closed.") - - self._state = ClientState.OPENED - follow_redirects = ( - self.follow_redirects - if isinstance(follow_redirects, UseClientDefault) - else follow_redirects - ) - - self._set_timeout(request) - - auth = self._build_request_auth(request, auth) - - response = self._send_handling_auth( - request, - auth=auth, - follow_redirects=follow_redirects, - history=[], - ) - try: - if not stream: - response.read() - - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_handling_auth( - self, - request: Request, - auth: Auth, - follow_redirects: bool, - history: list[Response], - ) -> Response: - auth_flow = auth.sync_auth_flow(request) - try: - request = next(auth_flow) - - while True: - response = self._send_handling_redirects( - request, - follow_redirects=follow_redirects, - history=history, - ) - try: - try: - next_request = auth_flow.send(response) - except StopIteration: - return response - - response.history = list(history) - response.read() - request = next_request - history.append(response) - - except BaseException as exc: - response.close() - raise exc - finally: - auth_flow.close() - - def _send_handling_redirects( - self, - request: Request, - follow_redirects: bool, - history: list[Response], - ) -> Response: - while True: - if len(history) > self.max_redirects: - raise TooManyRedirects( - "Exceeded maximum allowed redirects.", request=request - ) - - for hook in self._event_hooks["request"]: - hook(request) - - response = self._send_single_request(request) - try: - for hook in self._event_hooks["response"]: - hook(response) - response.history = list(history) - - if not response.has_redirect_location: - return response - - request = self._build_redirect_request(request, response) - history = history + [response] - - if follow_redirects: - response.read() - else: - response.next_request = request - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_single_request(self, request: Request) -> Response: - """ - Sends a single request, without handling any redirections. - """ - transport = self._transport_for_url(request.url) - start = time.perf_counter() - - if not isinstance(request.stream, SyncByteStream): - raise RuntimeError( - "Attempted to send an async request with a sync Client instance." - ) - - with request_context(request=request): - response = transport.handle_request(request) - - assert isinstance(response.stream, SyncByteStream) - - response.request = request - response.stream = BoundSyncStream( - response.stream, response=response, start=start - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - def get( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `GET` request. 
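The hook lists seen above in `_send_handling_redirects` run in registration order, once per request and once per response (including each redirect hop). A small sketch of registering them:

```python
import httpx

def log_request(request: httpx.Request) -> None:
    print(f"-> {request.method} {request.url}")

def raise_on_error(response: httpx.Response) -> None:
    # Runs before the body is read; raises httpx.HTTPStatusError on 4xx/5xx.
    response.raise_for_status()

client = httpx.Client(
    event_hooks={"request": [log_request], "response": [raise_on_error]}
)
```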
- - **Parameters**: See `httpx.request`. - """ - return self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def options( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def head( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def post( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def put( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. 
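Each verb helper is a thin wrapper over `Client.request(...)`, so the same merging and configuration rules apply. A brief sketch with a placeholder endpoint:

```python
import httpx

client = httpx.Client()
response = client.post(
    "https://example.org/submit",   # illustrative endpoint
    json={"name": "example"},       # serialized and Content-Type set for you
)
print(response.status_code)
```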
- """ - return self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def patch( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def delete( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def close(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - self._transport.close() - for transport in self._mounts.values(): - if transport is not None: - transport.close() - - def __enter__(self: T) -> T: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: ( - "Cannot reopen a client instance, once it has been closed." - ), - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - self._transport.__enter__() - for transport in self._mounts.values(): - if transport is not None: - transport.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self._state = ClientState.CLOSED - - self._transport.__exit__(exc_type, exc_value, traceback) - for transport in self._mounts.values(): - if transport is not None: - transport.__exit__(exc_type, exc_value, traceback) - - -class AsyncClient(BaseClient): - """ - An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, - cookie persistence, etc. - - It can be shared between tasks. - - Usage: - - ```python - >>> async with httpx.AsyncClient() as client: - >>> response = await client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or sequence of two-tuples. 
- * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* Either `True` to use an SSL context with the - default CA bundle, `False` to disable verification, or an instance of - `ssl.SSLContext` to use a custom context. - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. - * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". - """ - - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - proxy: ProxyTypes | None = None, - mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - transport: AsyncBaseTransport | None = None, - trust_env: bool = True, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." 
- ) from None - - allow_env_proxies = trust_env and transport is None - proxy_map = self._get_proxy_map(proxy, allow_env_proxies) - - self._transport = self._init_transport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - transport=transport, - ) - - self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { - URLPattern(key): None - if proxy is None - else self._init_proxy_transport( - proxy, - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - ) - for key, proxy in proxy_map.items() - } - if mounts is not None: - self._mounts.update( - {URLPattern(key): transport for key, transport in mounts.items()} - ) - self._mounts = dict(sorted(self._mounts.items())) - - def _init_transport( - self, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - transport: AsyncBaseTransport | None = None, - ) -> AsyncBaseTransport: - if transport is not None: - return transport - - return AsyncHTTPTransport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - ) - - def _init_proxy_transport( - self, - proxy: Proxy, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - ) -> AsyncBaseTransport: - return AsyncHTTPTransport( - verify=verify, - cert=cert, - trust_env=trust_env, - http1=http1, - http2=http2, - limits=limits, - proxy=proxy, - ) - - def _transport_for_url(self, url: URL) -> AsyncBaseTransport: - """ - Returns the transport instance that should be used for a given URL. - This will either be the standard connection pool, or a proxy. - """ - for pattern, transport in self._mounts.items(): - if pattern.matches(url): - return self._transport if transport is None else transport - - return self._transport - - async def request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Build and send a request. - - Equivalent to: - - ```python - request = client.build_request(...) - response = await client.send(request, ...) - ``` - - See `AsyncClient.build_request()`, `AsyncClient.send()` - and [Merging of configuration][0] for how the various parameters - are merged with client-level configuration. - - [0]: /advanced/clients/#merging-of-configuration - """ - - if cookies is not None: # pragma: no cover - message = ( - "Setting per-request cookies=<...> is being deprecated, because " - "the expected behaviour on cookie persistence is ambiguous. Set " - "cookies directly on the client instance instead." 
- ) - warnings.warn(message, DeprecationWarning, stacklevel=2) - - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return await self.send(request, auth=auth, follow_redirects=follow_redirects) - - @asynccontextmanager - async def stream( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> typing.AsyncIterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = await self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - await response.aclose() - - async def send( - self, - request: Request, - *, - stream: bool = False, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. - - Typically you'll want to build one with `AsyncClient.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. 
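The async variant mirrors the sync one; the URL here is a placeholder. The nested context managers guarantee both `response.aclose()` and `client.aclose()` run:

```python
import asyncio
import httpx

async def download() -> None:
    async with httpx.AsyncClient() as client:
        async with client.stream("GET", "https://example.org/large-file") as response:
            async for chunk in response.aiter_bytes():
                ...  # consume chunks as they arrive

asyncio.run(download())
```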
- - See also: [Request instances][0] - - [0]: /advanced/clients/#request-instances - """ - if self._state == ClientState.CLOSED: - raise RuntimeError("Cannot send a request, as the client has been closed.") - - self._state = ClientState.OPENED - follow_redirects = ( - self.follow_redirects - if isinstance(follow_redirects, UseClientDefault) - else follow_redirects - ) - - self._set_timeout(request) - - auth = self._build_request_auth(request, auth) - - response = await self._send_handling_auth( - request, - auth=auth, - follow_redirects=follow_redirects, - history=[], - ) - try: - if not stream: - await response.aread() - - return response - - except BaseException as exc: - await response.aclose() - raise exc - - async def _send_handling_auth( - self, - request: Request, - auth: Auth, - follow_redirects: bool, - history: list[Response], - ) -> Response: - auth_flow = auth.async_auth_flow(request) - try: - request = await auth_flow.__anext__() - - while True: - response = await self._send_handling_redirects( - request, - follow_redirects=follow_redirects, - history=history, - ) - try: - try: - next_request = await auth_flow.asend(response) - except StopAsyncIteration: - return response - - response.history = list(history) - await response.aread() - request = next_request - history.append(response) - - except BaseException as exc: - await response.aclose() - raise exc - finally: - await auth_flow.aclose() - - async def _send_handling_redirects( - self, - request: Request, - follow_redirects: bool, - history: list[Response], - ) -> Response: - while True: - if len(history) > self.max_redirects: - raise TooManyRedirects( - "Exceeded maximum allowed redirects.", request=request - ) - - for hook in self._event_hooks["request"]: - await hook(request) - - response = await self._send_single_request(request) - try: - for hook in self._event_hooks["response"]: - await hook(response) - - response.history = list(history) - - if not response.has_redirect_location: - return response - - request = self._build_redirect_request(request, response) - history = history + [response] - - if follow_redirects: - await response.aread() - else: - response.next_request = request - return response - - except BaseException as exc: - await response.aclose() - raise exc - - async def _send_single_request(self, request: Request) -> Response: - """ - Sends a single request, without handling any redirections. - """ - transport = self._transport_for_url(request.url) - start = time.perf_counter() - - if not isinstance(request.stream, AsyncByteStream): - raise RuntimeError( - "Attempted to send an sync request with an AsyncClient instance." 
- ) - - with request_context(request=request): - response = await transport.handle_async_request(request) - - assert isinstance(response.stream, AsyncByteStream) - response.request = request - response.stream = BoundAsyncStream( - response.stream, response=response, start=start - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - async def get( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `GET` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def options( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def head( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def post( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. 
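Because a single `AsyncClient` can be shared between tasks, the verb helpers compose naturally with `asyncio.gather` for concurrent requests. A short sketch with placeholder URLs:

```python
import asyncio
import httpx

async def fetch_all(urls: list[str]) -> list[int]:
    async with httpx.AsyncClient() as client:
        responses = await asyncio.gather(*(client.get(url) for url in urls))
        return [response.status_code for response in responses]

print(asyncio.run(fetch_all(["https://example.org", "https://example.com"])))
```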
- """ - return await self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def put( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def patch( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def delete( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def aclose(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - await self._transport.aclose() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.aclose() - - async def __aenter__(self: U) -> U: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: ( - "Cannot reopen a client instance, once it has been closed." 
- ), - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - await self._transport.__aenter__() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self._state = ClientState.CLOSED - - await self._transport.__aexit__(exc_type, exc_value, traceback) - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/venv/lib/python3.12/site-packages/httpx/_config.py b/venv/lib/python3.12/site-packages/httpx/_config.py deleted file mode 100644 index 467a6c90..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_config.py +++ /dev/null @@ -1,248 +0,0 @@ -from __future__ import annotations - -import os -import typing - -from ._models import Headers -from ._types import CertTypes, HeaderTypes, TimeoutTypes -from ._urls import URL - -if typing.TYPE_CHECKING: - import ssl # pragma: no cover - -__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"] - - -class UnsetType: - pass # pragma: no cover - - -UNSET = UnsetType() - - -def create_ssl_context( - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, -) -> ssl.SSLContext: - import ssl - import warnings - - import certifi - - if verify is True: - if trust_env and os.environ.get("SSL_CERT_FILE"): # pragma: nocover - ctx = ssl.create_default_context(cafile=os.environ["SSL_CERT_FILE"]) - elif trust_env and os.environ.get("SSL_CERT_DIR"): # pragma: nocover - ctx = ssl.create_default_context(capath=os.environ["SSL_CERT_DIR"]) - else: - # Default case... - ctx = ssl.create_default_context(cafile=certifi.where()) - elif verify is False: - ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - ctx.check_hostname = False - ctx.verify_mode = ssl.CERT_NONE - elif isinstance(verify, str): # pragma: nocover - message = ( - "`verify=` is deprecated. " - "Use `verify=ssl.create_default_context(cafile=...)` " - "or `verify=ssl.create_default_context(capath=...)` instead." - ) - warnings.warn(message, DeprecationWarning) - if os.path.isdir(verify): - return ssl.create_default_context(capath=verify) - return ssl.create_default_context(cafile=verify) - else: - ctx = verify - - if cert: # pragma: nocover - message = ( - "`cert=...` is deprecated. Use `verify=` instead," - "with `.load_cert_chain()` to configure the certificate chain." - ) - warnings.warn(message, DeprecationWarning) - if isinstance(cert, str): - ctx.load_cert_chain(cert) - else: - ctx.load_cert_chain(*cert) - - return ctx - - -class Timeout: - """ - Timeout configuration. - - **Usage**: - - Timeout(None) # No timeouts. - Timeout(5.0) # 5s timeout on all operations. - Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. - Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. - Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. - # 5s timeout elsewhere. - """ - - def __init__( - self, - timeout: TimeoutTypes | UnsetType = UNSET, - *, - connect: None | float | UnsetType = UNSET, - read: None | float | UnsetType = UNSET, - write: None | float | UnsetType = UNSET, - pool: None | float | UnsetType = UNSET, - ) -> None: - if isinstance(timeout, Timeout): - # Passed as a single explicit Timeout. 
- assert connect is UNSET - assert read is UNSET - assert write is UNSET - assert pool is UNSET - self.connect = timeout.connect # type: typing.Optional[float] - self.read = timeout.read # type: typing.Optional[float] - self.write = timeout.write # type: typing.Optional[float] - self.pool = timeout.pool # type: typing.Optional[float] - elif isinstance(timeout, tuple): - # Passed as a tuple. - self.connect = timeout[0] - self.read = timeout[1] - self.write = None if len(timeout) < 3 else timeout[2] - self.pool = None if len(timeout) < 4 else timeout[3] - elif not ( - isinstance(connect, UnsetType) - or isinstance(read, UnsetType) - or isinstance(write, UnsetType) - or isinstance(pool, UnsetType) - ): - self.connect = connect - self.read = read - self.write = write - self.pool = pool - else: - if isinstance(timeout, UnsetType): - raise ValueError( - "httpx.Timeout must either include a default, or set all " - "four parameters explicitly." - ) - self.connect = timeout if isinstance(connect, UnsetType) else connect - self.read = timeout if isinstance(read, UnsetType) else read - self.write = timeout if isinstance(write, UnsetType) else write - self.pool = timeout if isinstance(pool, UnsetType) else pool - - def as_dict(self) -> dict[str, float | None]: - return { - "connect": self.connect, - "read": self.read, - "write": self.write, - "pool": self.pool, - } - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.connect == other.connect - and self.read == other.read - and self.write == other.write - and self.pool == other.pool - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - if len({self.connect, self.read, self.write, self.pool}) == 1: - return f"{class_name}(timeout={self.connect})" - return ( - f"{class_name}(connect={self.connect}, " - f"read={self.read}, write={self.write}, pool={self.pool})" - ) - - -class Limits: - """ - Configuration for limits to various client behaviors. - - **Parameters:** - - * **max_connections** - The maximum number of concurrent connections that may be - established. - * **max_keepalive_connections** - Allow the connection pool to maintain - keep-alive connections below this point. Should be less than or equal - to `max_connections`. - * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. 
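A short sketch combining the two configuration objects; the values chosen are arbitrary:

```python
import httpx

# 10s to establish a connection; 5s each for read, write, and pool acquisition.
timeout = httpx.Timeout(5.0, connect=10.0)
# At most 100 concurrent connections, 20 of which may be kept alive when idle.
limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)

client = httpx.Client(timeout=timeout, limits=limits)
```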
- """ - - def __init__( - self, - *, - max_connections: int | None = None, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = 5.0, - ) -> None: - self.max_connections = max_connections - self.max_keepalive_connections = max_keepalive_connections - self.keepalive_expiry = keepalive_expiry - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.max_connections == other.max_connections - and self.max_keepalive_connections == other.max_keepalive_connections - and self.keepalive_expiry == other.keepalive_expiry - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - return ( - f"{class_name}(max_connections={self.max_connections}, " - f"max_keepalive_connections={self.max_keepalive_connections}, " - f"keepalive_expiry={self.keepalive_expiry})" - ) - - -class Proxy: - def __init__( - self, - url: URL | str, - *, - ssl_context: ssl.SSLContext | None = None, - auth: tuple[str, str] | None = None, - headers: HeaderTypes | None = None, - ) -> None: - url = URL(url) - headers = Headers(headers) - - if url.scheme not in ("http", "https", "socks5", "socks5h"): - raise ValueError(f"Unknown scheme for proxy URL {url!r}") - - if url.username or url.password: - # Remove any auth credentials from the URL. - auth = (url.username, url.password) - url = url.copy_with(username=None, password=None) - - self.url = url - self.auth = auth - self.headers = headers - self.ssl_context = ssl_context - - @property - def raw_auth(self) -> tuple[bytes, bytes] | None: - # The proxy authentication as raw bytes. - return ( - None - if self.auth is None - else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) - ) - - def __repr__(self) -> str: - # The authentication is represented with the password component masked. - auth = (self.auth[0], "********") if self.auth else None - - # Build a nice concise representation. 
- url_str = f"{str(self.url)!r}" - auth_str = f", auth={auth!r}" if auth else "" - headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" - return f"Proxy({url_str}{auth_str}{headers_str})" - - -DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) -DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) -DEFAULT_MAX_REDIRECTS = 20 diff --git a/venv/lib/python3.12/site-packages/httpx/_content.py b/venv/lib/python3.12/site-packages/httpx/_content.py deleted file mode 100644 index 6f479a08..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_content.py +++ /dev/null @@ -1,240 +0,0 @@ -from __future__ import annotations - -import inspect -import warnings -from json import dumps as json_dumps -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Iterable, - Iterator, - Mapping, -) -from urllib.parse import urlencode - -from ._exceptions import StreamClosed, StreamConsumed -from ._multipart import MultipartStream -from ._types import ( - AsyncByteStream, - RequestContent, - RequestData, - RequestFiles, - ResponseContent, - SyncByteStream, -) -from ._utils import peek_filelike_length, primitive_value_to_str - -__all__ = ["ByteStream"] - - -class ByteStream(AsyncByteStream, SyncByteStream): - def __init__(self, stream: bytes) -> None: - self._stream = stream - - def __iter__(self) -> Iterator[bytes]: - yield self._stream - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._stream - - -class IteratorByteStream(SyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: Iterable[bytes]) -> None: - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isgenerator(stream) - - def __iter__(self) -> Iterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "read"): - # File-like interfaces should use 'read' directly. - chunk = self._stream.read(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = self._stream.read(self.CHUNK_SIZE) - else: - # Otherwise iterate. - for part in self._stream: - yield part - - -class AsyncIteratorByteStream(AsyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: AsyncIterable[bytes]) -> None: - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isasyncgen(stream) - - async def __aiter__(self) -> AsyncIterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "aread"): - # File-like interfaces should use 'aread' directly. - chunk = await self._stream.aread(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = await self._stream.aread(self.CHUNK_SIZE) - else: - # Otherwise iterate. - async for part in self._stream: - yield part - - -class UnattachedStream(AsyncByteStream, SyncByteStream): - """ - If a request or response is serialized using pickle, then it is no longer - attached to a stream for I/O purposes. Any stream operations should result - in `httpx.StreamClosed`. 
- """ - - def __iter__(self) -> Iterator[bytes]: - raise StreamClosed() - - async def __aiter__(self) -> AsyncIterator[bytes]: - raise StreamClosed() - yield b"" # pragma: no cover - - -def encode_content( - content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - if isinstance(content, (bytes, str)): - body = content.encode("utf-8") if isinstance(content, str) else content - content_length = len(body) - headers = {"Content-Length": str(content_length)} if body else {} - return headers, ByteStream(body) - - elif isinstance(content, Iterable) and not isinstance(content, dict): - # `not isinstance(content, dict)` is a bit oddly specific, but it - # catches a case that's easy for users to make in error, and would - # otherwise pass through here, like any other bytes-iterable, - # because `dict` happens to be iterable. See issue #2491. - content_length_or_none = peek_filelike_length(content) - - if content_length_or_none is None: - headers = {"Transfer-Encoding": "chunked"} - else: - headers = {"Content-Length": str(content_length_or_none)} - return headers, IteratorByteStream(content) # type: ignore - - elif isinstance(content, AsyncIterable): - headers = {"Transfer-Encoding": "chunked"} - return headers, AsyncIteratorByteStream(content) - - raise TypeError(f"Unexpected type for 'content', {type(content)!r}") - - -def encode_urlencoded_data( - data: RequestData, -) -> tuple[dict[str, str], ByteStream]: - plain_data = [] - for key, value in data.items(): - if isinstance(value, (list, tuple)): - plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) - else: - plain_data.append((key, primitive_value_to_str(value))) - body = urlencode(plain_data, doseq=True).encode("utf-8") - content_length = str(len(body)) - content_type = "application/x-www-form-urlencoded" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_multipart_data( - data: RequestData, files: RequestFiles, boundary: bytes | None -) -> tuple[dict[str, str], MultipartStream]: - multipart = MultipartStream(data=data, files=files, boundary=boundary) - headers = multipart.get_headers() - return headers, multipart - - -def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: - body = text.encode("utf-8") - content_length = str(len(body)) - content_type = "text/plain; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: - body = html.encode("utf-8") - content_length = str(len(body)) - content_type = "text/html; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: - body = json_dumps( - json, ensure_ascii=False, separators=(",", ":"), allow_nan=False - ).encode("utf-8") - content_length = str(len(body)) - content_type = "application/json" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_request( - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: Any | None = None, - boundary: bytes | None = None, -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - """ - Handles encoding the given `content`, `data`, `files`, and `json`, - returning 
a two-tuple of (<headers>, <stream>). - """ - if data is not None and not isinstance(data, Mapping): - # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>` - # for raw request content, and `data=<form data>
` for url encoded or - multipart form content. - # - # However for compat with requests, we *do* still support - # `data=<content>` usages. We deal with that case here, treating it - # as if `content=<...>` had been supplied instead. - message = "Use 'content=<...>' to upload raw bytes/text content." - warnings.warn(message, DeprecationWarning, stacklevel=2) - return encode_content(data) - - if content is not None: - return encode_content(content) - elif files: - return encode_multipart_data(data or {}, files, boundary) - elif data: - return encode_urlencoded_data(data) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") - - -def encode_response( - content: ResponseContent | None = None, - text: str | None = None, - html: str | None = None, - json: Any | None = None, -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - """ - Handles encoding the given `content`, returning a two-tuple of - (<headers>, <stream>). - """ - if content is not None: - return encode_content(content) - elif text is not None: - return encode_text(text) - elif html is not None: - return encode_html(html) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") diff --git a/venv/lib/python3.12/site-packages/httpx/_decoders.py b/venv/lib/python3.12/site-packages/httpx/_decoders.py deleted file mode 100644 index 899dfada..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_decoders.py +++ /dev/null @@ -1,393 +0,0 @@ -""" -Handlers for Content-Encoding. - -See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding -""" - -from __future__ import annotations - -import codecs -import io -import typing -import zlib - -from ._exceptions import DecodingError - -# Brotli support is optional -try: - # The C bindings in `brotli` are recommended for CPython. - import brotli -except ImportError: # pragma: no cover - try: - # The CFFI bindings in `brotlicffi` are recommended for PyPy - # and other environments. - import brotlicffi as brotli - except ImportError: - brotli = None - - -# Zstandard support is optional -try: - import zstandard -except ImportError: # pragma: no cover - zstandard = None # type: ignore - - -class ContentDecoder: - def decode(self, data: bytes) -> bytes: - raise NotImplementedError() # pragma: no cover - - def flush(self) -> bytes: - raise NotImplementedError() # pragma: no cover - - -class IdentityDecoder(ContentDecoder): - """ - Handle unencoded data. - """ - - def decode(self, data: bytes) -> bytes: - return data - - def flush(self) -> bytes: - return b"" - - -class DeflateDecoder(ContentDecoder): - """ - Handle 'deflate' decoding. - - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.first_attempt = True - self.decompressor = zlib.decompressobj() - - def decode(self, data: bytes) -> bytes: - was_first_attempt = self.first_attempt - self.first_attempt = False - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - if was_first_attempt: - self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) - return self.decode(data) - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class GZipDecoder(ContentDecoder): - """ - Handle 'gzip' decoding.
- - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) - - def decode(self, data: bytes) -> bytes: - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class BrotliDecoder(ContentDecoder): - """ - Handle 'brotli' decoding. - - Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ - or `pip install brotli`. See https://github.com/google/brotli - Supports both 'brotlipy' and 'Brotli' packages since they share an import - name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' - """ - - def __init__(self) -> None: - if brotli is None: # pragma: no cover - raise ImportError( - "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " - "packages have been installed. " - "Make sure to install httpx using `pip install httpx[brotli]`." - ) from None - - self.decompressor = brotli.Decompressor() - self.seen_data = False - self._decompress: typing.Callable[[bytes], bytes] - if hasattr(self.decompressor, "decompress"): - # The 'brotlicffi' package. - self._decompress = self.decompressor.decompress # pragma: no cover - else: - # The 'brotli' package. - self._decompress = self.decompressor.process # pragma: no cover - - def decode(self, data: bytes) -> bytes: - if not data: - return b"" - self.seen_data = True - try: - return self._decompress(data) - except brotli.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - if not self.seen_data: - return b"" - try: - if hasattr(self.decompressor, "finish"): - # Only available in the 'brotlicffi' package. - - # As the decompressor decompresses eagerly, this - # will never actually emit any data. However, it will potentially throw - # errors if a truncated or damaged data stream has been used. - self.decompressor.finish() # pragma: no cover - return b"" - except brotli.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class ZStandardDecoder(ContentDecoder): - """ - Handle 'zstd' RFC 8878 decoding. - - Requires `pip install zstandard`. - Can be installed as a dependency of httpx using `pip install httpx[zstd]`. - """ - - # inspired by the ZstdDecoder implementation in urllib3 - def __init__(self) -> None: - if zstandard is None: # pragma: no cover - raise ImportError( - "Using 'ZStandardDecoder', ..." - "Make sure to install httpx using `pip install httpx[zstd]`." 
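The gzip handling above is a thin wrapper over the stdlib: `zlib.MAX_WBITS | 16` selects the gzip container format, which is the entire trick behind `GZipDecoder`. A stdlib-only round-trip using the same call, for reference:

```
import gzip
import zlib

# wbits=MAX_WBITS|16 (i.e. 31) tells zlib to expect a gzip wrapper,
# matching GZipDecoder.__init__ above.
body = b"hello world" * 10
encoded = gzip.compress(body)
decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)
decoded = decompressor.decompress(encoded) + decompressor.flush()
assert decoded == body
```

The brotli and zstandard decoders follow the same decode/flush contract, but are only registered when their optional packages are importable.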
- ) from None - - self.decompressor = zstandard.ZstdDecompressor().decompressobj() - self.seen_data = False - - def decode(self, data: bytes) -> bytes: - assert zstandard is not None - self.seen_data = True - output = io.BytesIO() - try: - output.write(self.decompressor.decompress(data)) - while self.decompressor.eof and self.decompressor.unused_data: - unused_data = self.decompressor.unused_data - self.decompressor = zstandard.ZstdDecompressor().decompressobj() - output.write(self.decompressor.decompress(unused_data)) - except zstandard.ZstdError as exc: - raise DecodingError(str(exc)) from exc - return output.getvalue() - - def flush(self) -> bytes: - if not self.seen_data: - return b"" - ret = self.decompressor.flush() # note: this is a no-op - if not self.decompressor.eof: - raise DecodingError("Zstandard data is incomplete") # pragma: no cover - return bytes(ret) - - -class MultiDecoder(ContentDecoder): - """ - Handle the case where multiple encodings have been applied. - """ - - def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: - """ - 'children' should be a sequence of decoders in the order in which - each was applied. - """ - # Note that we reverse the order for decoding. - self.children = list(reversed(children)) - - def decode(self, data: bytes) -> bytes: - for child in self.children: - data = child.decode(data) - return data - - def flush(self) -> bytes: - data = b"" - for child in self.children: - data = child.decode(data) + child.flush() - return data - - -class ByteChunker: - """ - Handles returning byte content in fixed-size chunks. - """ - - def __init__(self, chunk_size: int | None = None) -> None: - self._buffer = io.BytesIO() - self._chunk_size = chunk_size - - def decode(self, content: bytes) -> list[bytes]: - if self._chunk_size is None: - return [content] if content else [] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> list[bytes]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextChunker: - """ - Handles returning text content in fixed-size chunks. 
- """ - - def __init__(self, chunk_size: int | None = None) -> None: - self._buffer = io.StringIO() - self._chunk_size = chunk_size - - def decode(self, content: str) -> list[str]: - if self._chunk_size is None: - return [content] if content else [] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> list[str]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextDecoder: - """ - Handles incrementally decoding bytes into text - """ - - def __init__(self, encoding: str = "utf-8") -> None: - self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") - - def decode(self, data: bytes) -> str: - return self.decoder.decode(data) - - def flush(self) -> str: - return self.decoder.decode(b"", True) - - -class LineDecoder: - """ - Handles incrementally reading lines from text. - - Has the same behaviour as the stdllib splitlines, - but handling the input iteratively. - """ - - def __init__(self) -> None: - self.buffer: list[str] = [] - self.trailing_cr: bool = False - - def decode(self, text: str) -> list[str]: - # See https://docs.python.org/3/library/stdtypes.html#str.splitlines - NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" - - # We always push a trailing `\r` into the next decode iteration. - if self.trailing_cr: - text = "\r" + text - self.trailing_cr = False - if text.endswith("\r"): - self.trailing_cr = True - text = text[:-1] - - if not text: - # NOTE: the edge case input of empty text doesn't occur in practice, - # because other httpx internals filter out this value - return [] # pragma: no cover - - trailing_newline = text[-1] in NEWLINE_CHARS - lines = text.splitlines() - - if len(lines) == 1 and not trailing_newline: - # No new lines, buffer the input and continue. - self.buffer.append(lines[0]) - return [] - - if self.buffer: - # Include any existing buffer in the first portion of the - # splitlines result. - lines = ["".join(self.buffer) + lines[0]] + lines[1:] - self.buffer = [] - - if not trailing_newline: - # If the last segment of splitlines is not newline terminated, - # then drop it from our output and start a new buffer. 
- self.buffer = [lines.pop()] - - return lines - - def flush(self) -> list[str]: - if not self.buffer and not self.trailing_cr: - return [] - - lines = ["".join(self.buffer)] - self.buffer = [] - self.trailing_cr = False - return lines - - -SUPPORTED_DECODERS = { - "identity": IdentityDecoder, - "gzip": GZipDecoder, - "deflate": DeflateDecoder, - "br": BrotliDecoder, - "zstd": ZStandardDecoder, -} - - -if brotli is None: - SUPPORTED_DECODERS.pop("br") # pragma: no cover -if zstandard is None: - SUPPORTED_DECODERS.pop("zstd") # pragma: no cover diff --git a/venv/lib/python3.12/site-packages/httpx/_exceptions.py b/venv/lib/python3.12/site-packages/httpx/_exceptions.py deleted file mode 100644 index 77f45a6d..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_exceptions.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Our exception hierarchy: - -* HTTPError - x RequestError - + TransportError - - TimeoutException - · ConnectTimeout - · ReadTimeout - · WriteTimeout - · PoolTimeout - - NetworkError - · ConnectError - · ReadError - · WriteError - · CloseError - - ProtocolError - · LocalProtocolError - · RemoteProtocolError - - ProxyError - - UnsupportedProtocol - + DecodingError - + TooManyRedirects - x HTTPStatusError -* InvalidURL -* CookieConflict -* StreamError - x StreamConsumed - x StreamClosed - x ResponseNotRead - x RequestNotRead -""" - -from __future__ import annotations - -import contextlib -import typing - -if typing.TYPE_CHECKING: - from ._models import Request, Response # pragma: no cover - -__all__ = [ - "CloseError", - "ConnectError", - "ConnectTimeout", - "CookieConflict", - "DecodingError", - "HTTPError", - "HTTPStatusError", - "InvalidURL", - "LocalProtocolError", - "NetworkError", - "PoolTimeout", - "ProtocolError", - "ProxyError", - "ReadError", - "ReadTimeout", - "RemoteProtocolError", - "RequestError", - "RequestNotRead", - "ResponseNotRead", - "StreamClosed", - "StreamConsumed", - "StreamError", - "TimeoutException", - "TooManyRedirects", - "TransportError", - "UnsupportedProtocol", - "WriteError", - "WriteTimeout", -] - - -class HTTPError(Exception): - """ - Base class for `RequestError` and `HTTPStatusError`. - - Useful for `try...except` blocks when issuing a request, - and then calling `.raise_for_status()`. - - For example: - - ``` - try: - response = httpx.get("https://www.example.com") - response.raise_for_status() - except httpx.HTTPError as exc: - print(f"HTTP Exception for {exc.request.url} - {exc}") - ``` - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - self._request: Request | None = None - - @property - def request(self) -> Request: - if self._request is None: - raise RuntimeError("The .request property has not been set.") - return self._request - - @request.setter - def request(self, request: Request) -> None: - self._request = request - - -class RequestError(HTTPError): - """ - Base class for all exceptions that may occur when issuing a `.request()`. - """ - - def __init__(self, message: str, *, request: Request | None = None) -> None: - super().__init__(message) - # At the point an exception is raised we won't typically have a request - # instance to associate it with. - # - # The 'request_context' context manager is used within the Client and - # Response methods in order to ensure that any raised exceptions - # have a `.request` property set on them. - self._request = request - - -class TransportError(RequestError): - """ - Base class for all exceptions that occur at the level of the Transport API. 
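For downstream code, the practical upshot of this hierarchy is that `httpx.RequestError` covers every transport-level failure, while `httpx.HTTPStatusError` is only raised by `raise_for_status()`. A minimal sketch; note that `TimeoutException` has to be caught before its parent `RequestError`:

```
import httpx

try:
    response = httpx.get("https://www.example.com", timeout=5.0)
    response.raise_for_status()
except httpx.TimeoutException as exc:
    print(f"Timed out: {exc.request.url}")
except httpx.RequestError as exc:
    print(f"Transport failure: {exc}")
except httpx.HTTPStatusError as exc:
    print(f"Got {exc.response.status_code} from {exc.request.url}")
```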
- """ - - -# Timeout exceptions... - - -class TimeoutException(TransportError): - """ - The base class for timeout errors. - - An operation has timed out. - """ - - -class ConnectTimeout(TimeoutException): - """ - Timed out while connecting to the host. - """ - - -class ReadTimeout(TimeoutException): - """ - Timed out while receiving data from the host. - """ - - -class WriteTimeout(TimeoutException): - """ - Timed out while sending data to the host. - """ - - -class PoolTimeout(TimeoutException): - """ - Timed out waiting to acquire a connection from the pool. - """ - - -# Core networking exceptions... - - -class NetworkError(TransportError): - """ - The base class for network-related errors. - - An error occurred while interacting with the network. - """ - - -class ReadError(NetworkError): - """ - Failed to receive data from the network. - """ - - -class WriteError(NetworkError): - """ - Failed to send data through the network. - """ - - -class ConnectError(NetworkError): - """ - Failed to establish a connection. - """ - - -class CloseError(NetworkError): - """ - Failed to close a connection. - """ - - -# Other transport exceptions... - - -class ProxyError(TransportError): - """ - An error occurred while establishing a proxy connection. - """ - - -class UnsupportedProtocol(TransportError): - """ - Attempted to make a request to an unsupported protocol. - - For example issuing a request to `ftp://www.example.com`. - """ - - -class ProtocolError(TransportError): - """ - The protocol was violated. - """ - - -class LocalProtocolError(ProtocolError): - """ - A protocol was violated by the client. - - For example if the user instantiated a `Request` instance explicitly, - failed to include the mandatory `Host:` header, and then issued it directly - using `client.send()`. - """ - - -class RemoteProtocolError(ProtocolError): - """ - The protocol was violated by the server. - - For example, returning malformed HTTP. - """ - - -# Other request exceptions... - - -class DecodingError(RequestError): - """ - Decoding of the response failed, due to a malformed encoding. - """ - - -class TooManyRedirects(RequestError): - """ - Too many redirects. - """ - - -# Client errors - - -class HTTPStatusError(HTTPError): - """ - The response had an error HTTP status of 4xx or 5xx. - - May be raised when calling `response.raise_for_status()` - """ - - def __init__(self, message: str, *, request: Request, response: Response) -> None: - super().__init__(message) - self.request = request - self.response = response - - -class InvalidURL(Exception): - """ - URL is improperly formed or cannot be parsed. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class CookieConflict(Exception): - """ - Attempted to lookup a cookie by name, but multiple cookies existed. - - Can occur when calling `response.cookies.get(...)`. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -# Stream exceptions... - -# These may occur as the result of a programming error, by accessing -# the request/response stream in an invalid manner. - - -class StreamError(RuntimeError): - """ - The base class for stream exceptions. - - The developer made an error in accessing the request stream in - an invalid way. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class StreamConsumed(StreamError): - """ - Attempted to read or stream content, but the content has already - been streamed. 
- """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream some content, but the content has " - "already been streamed. For requests, this could be due to passing " - "a generator as request content, and then receiving a redirect " - "response or a secondary request as part of an authentication flow." - "For responses, this could be due to attempting to stream the response " - "content more than once." - ) - super().__init__(message) - - -class StreamClosed(StreamError): - """ - Attempted to read or stream response content, but the request has been - closed. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream content, but the stream has " "been closed." - ) - super().__init__(message) - - -class ResponseNotRead(StreamError): - """ - Attempted to access streaming response content, without having called `read()`. - """ - - def __init__(self) -> None: - message = ( - "Attempted to access streaming response content," - " without having called `read()`." - ) - super().__init__(message) - - -class RequestNotRead(StreamError): - """ - Attempted to access streaming request content, without having called `read()`. - """ - - def __init__(self) -> None: - message = ( - "Attempted to access streaming request content," - " without having called `read()`." - ) - super().__init__(message) - - -@contextlib.contextmanager -def request_context( - request: Request | None = None, -) -> typing.Iterator[None]: - """ - A context manager that can be used to attach the given request context - to any `RequestError` exceptions that are raised within the block. - """ - try: - yield - except RequestError as exc: - if request is not None: - exc.request = request - raise exc diff --git a/venv/lib/python3.12/site-packages/httpx/_main.py b/venv/lib/python3.12/site-packages/httpx/_main.py deleted file mode 100644 index cffa4bb7..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_main.py +++ /dev/null @@ -1,506 +0,0 @@ -from __future__ import annotations - -import functools -import json -import sys -import typing - -import click -import pygments.lexers -import pygments.util -import rich.console -import rich.markup -import rich.progress -import rich.syntax -import rich.table - -from ._client import Client -from ._exceptions import RequestError -from ._models import Response -from ._status_codes import codes - -if typing.TYPE_CHECKING: - import httpcore # pragma: no cover - - -def print_help() -> None: - console = rich.console.Console() - - console.print("[bold]HTTPX :butterfly:", justify="center") - console.print() - console.print("A next generation HTTP client.", justify="center") - console.print() - console.print( - "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" - ) - console.print() - - table = rich.table.Table.grid(padding=1, pad_edge=True) - table.add_column("Parameter", no_wrap=True, justify="left", style="bold") - table.add_column("Description") - table.add_row( - "-m, --method [cyan]METHOD", - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" - "[Default: GET, or POST if a request body is included]", - ) - table.add_row( - "-p, --params [cyan] ...", - "Query parameters to include in the request URL.", - ) - table.add_row( - "-c, --content [cyan]TEXT", "Byte content to include in the request body." - ) - table.add_row( - "-d, --data [cyan] ...", "Form data to include in the request body." 
- ) - table.add_row( - "-f, --files [cyan]<NAME FILENAME> ...", - "Form files to include in the request body.", - ) - table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") - table.add_row( - "-h, --headers [cyan]<NAME VALUE> ...", - "Include additional HTTP headers in the request.", - ) - table.add_row( - "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request." - ) - table.add_row( - "--auth [cyan]<USER PASS>", - "Username and password to include in the request. Specify '-' for the password" - " to use a password prompt. Note that using --verbose/-v will expose" - " the Authorization header, including the password encoding" - " in a trivially reversible format.", - ) - - table.add_row( - "--proxy [cyan]URL", - "Send the request via a proxy. Should be the URL giving the proxy address.", - ) - - table.add_row( - "--timeout [cyan]FLOAT", - "Timeout value to use for network operations, such as establishing the" - " connection, reading some data, etc... [Default: 5.0]", - ) - - table.add_row("--follow-redirects", "Automatically follow redirects.") - table.add_row("--no-verify", "Disable SSL verification.") - table.add_row( - "--http2", "Send the request using HTTP/2, if the remote server supports it." - ) - - table.add_row( - "--download [cyan]FILE", - "Save the response content as a file, rather than displaying it.", - ) - - table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") - table.add_row("--help", "Show this message and exit.") - console.print(table) - - -def get_lexer_for_response(response: Response) -> str: - content_type = response.headers.get("Content-Type") - if content_type is not None: - mime_type, _, _ = content_type.partition(";") - try: - return typing.cast( - str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name - ) - except pygments.util.ClassNotFound: # pragma: no cover - pass - return "" # pragma: no cover - - -def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: - version = "HTTP/2" if http2 else "HTTP/1.1" - headers = [ - (name.lower() if http2 else name, value) for name, value in request.headers - ] - method = request.method.decode("ascii") - target = request.url.target.decode("ascii") - lines = [f"{method} {target} {version}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def format_response_headers( - http_version: bytes, - status: int, - reason_phrase: bytes | None, - headers: list[tuple[bytes, bytes]], -) -> str: - version = http_version.decode("ascii") - reason = ( - codes.get_reason_phrase(status) - if reason_phrase is None - else reason_phrase.decode("ascii") - ) - lines = [f"{version} {status} {reason}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: - console = rich.console.Console() - http_text = format_request_headers(request, http2=http2) - syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response_headers( - http_version: bytes, - status: int, - reason_phrase: bytes | None, - headers: list[tuple[bytes, bytes]], -) -> None: - console = rich.console.Console() - http_text = format_response_headers(http_version, status, reason_phrase, headers) - syntax = rich.syntax.Syntax(http_text, "http",
theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response(response: Response) -> None: - console = rich.console.Console() - lexer_name = get_lexer_for_response(response) - if lexer_name: - if lexer_name.lower() == "json": - try: - data = response.json() - text = json.dumps(data, indent=4) - except ValueError: # pragma: no cover - text = response.text - else: - text = response.text - - syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) - console.print(syntax) - else: - console.print(f"<{len(response.content)} bytes of binary data>") - - -_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] -_PCTRTTT = typing.Tuple[_PCTRTT, ...] -_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] - - -def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover - lines = [] - for key, value in cert.items(): - if isinstance(value, (list, tuple)): - lines.append(f"* {key}:") - for item in value: - if key in ("subject", "issuer"): - for sub_item in item: - lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") - elif isinstance(item, tuple) and len(item) == 2: - lines.append(f"* {item[0]}: {item[1]!r}") - else: - lines.append(f"* {item!r}") - else: - lines.append(f"* {key}: {value!r}") - return "\n".join(lines) - - -def trace( - name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False -) -> None: - console = rich.console.Console() - if name == "connection.connect_tcp.started" and verbose: - host = info["host"] - console.print(f"* Connecting to {host!r}") - elif name == "connection.connect_tcp.complete" and verbose: - stream = info["return_value"] - server_addr = stream.get_extra_info("server_addr") - console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") - elif name == "connection.start_tls.complete" and verbose: # pragma: no cover - stream = info["return_value"] - ssl_object = stream.get_extra_info("ssl_object") - version = ssl_object.version() - cipher = ssl_object.cipher() - server_cert = ssl_object.getpeercert() - alpn = ssl_object.selected_alpn_protocol() - console.print(f"* SSL established using {version!r} / {cipher[0]!r}") - console.print(f"* Selected ALPN protocol: {alpn!r}") - if server_cert: - console.print("* Server certificate:") - console.print(format_certificate(server_cert)) - elif name == "http11.send_request_headers.started" and verbose: - request = info["request"] - print_request_headers(request, http2=False) - elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover - request = info["request"] - print_request_headers(request, http2=True) - elif name == "http11.receive_response_headers.complete": - http_version, status, reason_phrase, headers = info["return_value"] - print_response_headers(http_version, status, reason_phrase, headers) - elif name == "http2.receive_response_headers.complete": # pragma: no cover - status, headers = info["return_value"] - http_version = b"HTTP/2" - reason_phrase = None - print_response_headers(http_version, status, reason_phrase, headers) - - -def download_response(response: Response, download: typing.BinaryIO) -> None: - console = rich.console.Console() - console.print() - content_length = response.headers.get("Content-Length") - with rich.progress.Progress( - "[progress.description]{task.description}", - "[progress.percentage]{task.percentage:>3.0f}%", - rich.progress.BarColumn(bar_width=None), - 
rich.progress.DownloadColumn(), - rich.progress.TransferSpeedColumn(), - ) as progress: - description = f"Downloading [bold]{rich.markup.escape(download.name)}" - download_task = progress.add_task( - description, - total=int(content_length or 0), - start=content_length is not None, - ) - for chunk in response.iter_bytes(): - download.write(chunk) - progress.update(download_task, completed=response.num_bytes_downloaded) - - -def validate_json( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> typing.Any: - if value is None: - return None - - try: - return json.loads(value) - except json.JSONDecodeError: # pragma: no cover - raise click.BadParameter("Not valid JSON") - - -def validate_auth( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> typing.Any: - if value == (None, None): - return None - - username, password = value - if password == "-": # pragma: no cover - password = click.prompt("Password", hide_input=True) - return (username, password) - - -def handle_help( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> None: - if not value or ctx.resilient_parsing: - return - - print_help() - ctx.exit() - - -@click.command(add_help_option=False) -@click.argument("url", type=str) -@click.option( - "--method", - "-m", - "method", - type=str, - help=( - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " - "[Default: GET, or POST if a request body is included]" - ), -) -@click.option( - "--params", - "-p", - "params", - type=(str, str), - multiple=True, - help="Query parameters to include in the request URL.", -) -@click.option( - "--content", - "-c", - "content", - type=str, - help="Byte content to include in the request body.", -) -@click.option( - "--data", - "-d", - "data", - type=(str, str), - multiple=True, - help="Form data to include in the request body.", -) -@click.option( - "--files", - "-f", - "files", - type=(str, click.File(mode="rb")), - multiple=True, - help="Form files to include in the request body.", -) -@click.option( - "--json", - "-j", - "json", - type=str, - callback=validate_json, - help="JSON data to include in the request body.", -) -@click.option( - "--headers", - "-h", - "headers", - type=(str, str), - multiple=True, - help="Include additional HTTP headers in the request.", -) -@click.option( - "--cookies", - "cookies", - type=(str, str), - multiple=True, - help="Cookies to include in the request.", -) -@click.option( - "--auth", - "auth", - type=(str, str), - default=(None, None), - callback=validate_auth, - help=( - "Username and password to include in the request. " - "Specify '-' for the password to use a password prompt. " - "Note that using --verbose/-v will expose the Authorization header, " - "including the password encoding in a trivially reversible format." - ), -) -@click.option( - "--proxy", - "proxy", - type=str, - default=None, - help="Send the request via a proxy. Should be the URL giving the proxy address.", -) -@click.option( - "--timeout", - "timeout", - type=float, - default=5.0, - help=( - "Timeout value to use for network operations, such as establishing the " - "connection, reading some data, etc... 
[Default: 5.0]" - ), -) -@click.option( - "--follow-redirects", - "follow_redirects", - is_flag=True, - default=False, - help="Automatically follow redirects.", -) -@click.option( - "--no-verify", - "verify", - is_flag=True, - default=True, - help="Disable SSL verification.", -) -@click.option( - "--http2", - "http2", - type=bool, - is_flag=True, - default=False, - help="Send the request using HTTP/2, if the remote server supports it.", -) -@click.option( - "--download", - type=click.File("wb"), - help="Save the response content as a file, rather than displaying it.", -) -@click.option( - "--verbose", - "-v", - type=bool, - is_flag=True, - default=False, - help="Verbose. Show request as well as response.", -) -@click.option( - "--help", - is_flag=True, - is_eager=True, - expose_value=False, - callback=handle_help, - help="Show this message and exit.", -) -def main( - url: str, - method: str, - params: list[tuple[str, str]], - content: str, - data: list[tuple[str, str]], - files: list[tuple[str, click.File]], - json: str, - headers: list[tuple[str, str]], - cookies: list[tuple[str, str]], - auth: tuple[str, str] | None, - proxy: str, - timeout: float, - follow_redirects: bool, - verify: bool, - http2: bool, - download: typing.BinaryIO | None, - verbose: bool, -) -> None: - """ - An HTTP command line client. - Sends a request and displays the response. - """ - if not method: - method = "POST" if content or data or files or json else "GET" - - try: - with Client(proxy=proxy, timeout=timeout, http2=http2, verify=verify) as client: - with client.stream( - method, - url, - params=list(params), - content=content, - data=dict(data), - files=files, # type: ignore - json=json, - headers=headers, - cookies=dict(cookies), - auth=auth, - follow_redirects=follow_redirects, - extensions={"trace": functools.partial(trace, verbose=verbose)}, - ) as response: - if download is not None: - download_response(response, download) - else: - response.read() - if response.content: - print_response(response) - - except RequestError as exc: - console = rich.console.Console() - console.print(f"[red]{type(exc).__name__}[/red]: {exc}") - sys.exit(1) - - sys.exit(0 if response.is_success else 1) diff --git a/venv/lib/python3.12/site-packages/httpx/_models.py b/venv/lib/python3.12/site-packages/httpx/_models.py deleted file mode 100644 index 67d74bf8..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_models.py +++ /dev/null @@ -1,1277 +0,0 @@ -from __future__ import annotations - -import codecs -import datetime -import email.message -import json as jsonlib -import re -import typing -import urllib.request -from collections.abc import Mapping -from http.cookiejar import Cookie, CookieJar - -from ._content import ByteStream, UnattachedStream, encode_request, encode_response -from ._decoders import ( - SUPPORTED_DECODERS, - ByteChunker, - ContentDecoder, - IdentityDecoder, - LineDecoder, - MultiDecoder, - TextChunker, - TextDecoder, -) -from ._exceptions import ( - CookieConflict, - HTTPStatusError, - RequestNotRead, - ResponseNotRead, - StreamClosed, - StreamConsumed, - request_context, -) -from ._multipart import get_multipart_boundary_from_content_type -from ._status_codes import codes -from ._types import ( - AsyncByteStream, - CookieTypes, - HeaderTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - ResponseContent, - ResponseExtensions, - SyncByteStream, -) -from ._urls import URL -from ._utils import to_bytes_or_str, to_str - -__all__ = ["Cookies", "Headers", 
"Request", "Response"] - -SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} - - -def _is_known_encoding(encoding: str) -> bool: - """ - Return `True` if `encoding` is a known codec. - """ - try: - codecs.lookup(encoding) - except LookupError: - return False - return True - - -def _normalize_header_key(key: str | bytes, encoding: str | None = None) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header key. - """ - return key if isinstance(key, bytes) else key.encode(encoding or "ascii") - - -def _normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header value. - """ - if isinstance(value, bytes): - return value - if not isinstance(value, str): - raise TypeError(f"Header value must be str or bytes, not {type(value)}") - return value.encode(encoding or "ascii") - - -def _parse_content_type_charset(content_type: str) -> str | None: - # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. - # See: https://peps.python.org/pep-0594/#cgi - msg = email.message.Message() - msg["content-type"] = content_type - return msg.get_content_charset(failobj=None) - - -def _parse_header_links(value: str) -> list[dict[str, str]]: - """ - Returns a list of parsed link headers, for more info see: - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link - The generic syntax of those is: - Link: < uri-reference >; param1=value1; param2="value2" - So for instance: - Link; '; type="image/jpeg",;' - would return - [ - {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, - {"url": "http://.../back.jpeg"}, - ] - :param value: HTTP Link entity-header field - :return: list of parsed link headers - """ - links: list[dict[str, str]] = [] - replace_chars = " '\"" - value = value.strip(replace_chars) - if not value: - return links - for val in re.split(", *<", value): - try: - url, params = val.split(";", 1) - except ValueError: - url, params = val, "" - link = {"url": url.strip("<> '\"")} - for param in params.split(";"): - try: - key, value = param.split("=") - except ValueError: - break - link[key.strip(replace_chars)] = value.strip(replace_chars) - links.append(link) - return links - - -def _obfuscate_sensitive_headers( - items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], -) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: - for k, v in items: - if to_str(k.lower()) in SENSITIVE_HEADERS: - v = to_bytes_or_str("[secure]", match_type_of=v) - yield k, v - - -class Headers(typing.MutableMapping[str, str]): - """ - HTTP headers, as a case-insensitive multi-dict. - """ - - def __init__( - self, - headers: HeaderTypes | None = None, - encoding: str | None = None, - ) -> None: - self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] - - if isinstance(headers, Headers): - self._list = list(headers._list) - elif isinstance(headers, Mapping): - for k, v in headers.items(): - bytes_key = _normalize_header_key(k, encoding) - bytes_value = _normalize_header_value(v, encoding) - self._list.append((bytes_key, bytes_key.lower(), bytes_value)) - elif headers is not None: - for k, v in headers: - bytes_key = _normalize_header_key(k, encoding) - bytes_value = _normalize_header_value(v, encoding) - self._list.append((bytes_key, bytes_key.lower(), bytes_value)) - - self._encoding = encoding - - @property - def encoding(self) -> str: - """ - Header encoding is mandated as ascii, but we allow fallbacks to utf-8 - or iso-8859-1. 
- """ - if self._encoding is None: - for encoding in ["ascii", "utf-8"]: - for key, value in self.raw: - try: - key.decode(encoding) - value.decode(encoding) - except UnicodeDecodeError: - break - else: - # The else block runs if 'break' did not occur, meaning - # all values fitted the encoding. - self._encoding = encoding - break - else: - # The ISO-8859-1 encoding covers all 256 code points in a byte, - # so will never raise decode errors. - self._encoding = "iso-8859-1" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - self._encoding = value - - @property - def raw(self) -> list[tuple[bytes, bytes]]: - """ - Returns a list of the raw header items, as byte pairs. - """ - return [(raw_key, value) for raw_key, _, value in self._list] - - def keys(self) -> typing.KeysView[str]: - return {key.decode(self.encoding): None for _, key, value in self._list}.keys() - - def values(self) -> typing.ValuesView[str]: - values_dict: dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return `(key, value)` items of headers. Concatenate headers - into a single comma separated value when a key occurs multiple times. - """ - values_dict: dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.items() - - def multi_items(self) -> list[tuple[str, str]]: - """ - Return a list of `(key, value)` pairs of headers. Allow multiple - occurrences of the same key without concatenating into a single - comma separated value. - """ - return [ - (key.decode(self.encoding), value.decode(self.encoding)) - for _, key, value in self._list - ] - - def get(self, key: str, default: typing.Any = None) -> typing.Any: - """ - Return a header value. If multiple occurrences of the header occur - then concatenate them together with commas. - """ - try: - return self[key] - except KeyError: - return default - - def get_list(self, key: str, split_commas: bool = False) -> list[str]: - """ - Return a list of all header values for a given key. - If `split_commas=True` is passed, then any comma separated header - values are split into multiple return strings. - """ - get_header_key = key.lower().encode(self.encoding) - - values = [ - item_value.decode(self.encoding) - for _, item_key, item_value in self._list - if item_key.lower() == get_header_key - ] - - if not split_commas: - return values - - split_values = [] - for value in values: - split_values.extend([item.strip() for item in value.split(",")]) - return split_values - - def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore - headers = Headers(headers) - for key in headers.keys(): - if key in self: - self.pop(key) - self._list.extend(headers._list) - - def copy(self) -> Headers: - return Headers(self, encoding=self.encoding) - - def __getitem__(self, key: str) -> str: - """ - Return a single header value. - - If there are multiple headers with the same key, then we concatenate - them with commas. 
See: https://tools.ietf.org/html/rfc7230#section-3.2.2 - """ - normalized_key = key.lower().encode(self.encoding) - - items = [ - header_value.decode(self.encoding) - for _, header_key, header_value in self._list - if header_key == normalized_key - ] - - if items: - return ", ".join(items) - - raise KeyError(key) - - def __setitem__(self, key: str, value: str) -> None: - """ - Set the header `key` to `value`, removing any duplicate entries. - Retains insertion order. - """ - set_key = key.encode(self._encoding or "utf-8") - set_value = value.encode(self._encoding or "utf-8") - lookup_key = set_key.lower() - - found_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key == lookup_key - ] - - for idx in reversed(found_indexes[1:]): - del self._list[idx] - - if found_indexes: - idx = found_indexes[0] - self._list[idx] = (set_key, lookup_key, set_value) - else: - self._list.append((set_key, lookup_key, set_value)) - - def __delitem__(self, key: str) -> None: - """ - Remove the header `key`. - """ - del_key = key.lower().encode(self.encoding) - - pop_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key.lower() == del_key - ] - - if not pop_indexes: - raise KeyError(key) - - for idx in reversed(pop_indexes): - del self._list[idx] - - def __contains__(self, key: typing.Any) -> bool: - header_key = key.lower().encode(self.encoding) - return header_key in [key for _, key, _ in self._list] - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._list) - - def __eq__(self, other: typing.Any) -> bool: - try: - other_headers = Headers(other) - except ValueError: - return False - - self_list = [(key, value) for _, key, value in self._list] - other_list = [(key, value) for _, key, value in other_headers._list] - return sorted(self_list) == sorted(other_list) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - - encoding_str = "" - if self.encoding != "ascii": - encoding_str = f", encoding={self.encoding!r}" - - as_list = list(_obfuscate_sensitive_headers(self.multi_items())) - as_dict = dict(as_list) - - no_duplicate_keys = len(as_dict) == len(as_list) - if no_duplicate_keys: - return f"{class_name}({as_dict!r}{encoding_str})" - return f"{class_name}({as_list!r}{encoding_str})" - - -class Request: - def __init__( - self, - method: str, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - stream: SyncByteStream | AsyncByteStream | None = None, - extensions: RequestExtensions | None = None, - ) -> None: - self.method = method.upper() - self.url = URL(url) if params is None else URL(url, params=params) - self.headers = Headers(headers) - self.extensions = {} if extensions is None else dict(extensions) - - if cookies: - Cookies(cookies).set_cookie_header(self) - - if stream is None: - content_type: str | None = self.headers.get("content-type") - headers, stream = encode_request( - content=content, - data=data, - files=files, - json=json, - boundary=get_multipart_boundary_from_content_type( - content_type=content_type.encode(self.headers.encoding) - if content_type - else None - ), - ) - self._prepare(headers) - self.stream = stream - # Load the request body, except for streaming content. 
- if isinstance(stream, ByteStream): - self.read() - else: - # There's an important distinction between `Request(content=...)`, - # and `Request(stream=...)`. - # - # Using `content=...` implies automatically populated `Host` and content - # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include *any* - # auto-populated headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when: - # - # * Preserving the request stream when copying requests, eg for redirects. - # * Creating request instances on the *server-side* of the transport API. - self.stream = stream - - def _prepare(self, default_headers: dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. - if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: - continue - self.headers.setdefault(key, value) - - auto_headers: list[tuple[bytes, bytes]] = [] - - has_host = "Host" in self.headers - has_content_length = ( - "Content-Length" in self.headers or "Transfer-Encoding" in self.headers - ) - - if not has_host and self.url.host: - auto_headers.append((b"Host", self.url.netloc)) - if not has_content_length and self.method in ("POST", "PUT", "PATCH"): - auto_headers.append((b"Content-Length", b"0")) - - self.headers = Headers(auto_headers + self.headers.raw) - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise RequestNotRead() - return self._content - - def read(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.Iterable) - self._content = b"".join(self.stream) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. - self.stream = ByteStream(self._content) - return self._content - - async def aread(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.AsyncIterable) - self._content = b"".join([part async for part in self.stream]) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. 
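As the comments above spell out, `content=` gives auto-populated headers while `stream=` leaves them alone, and a body with no known length falls back to chunked transfer. A small sketch of both content cases:

```
import httpx

request = httpx.Request("POST", "https://www.example.com/upload", content=b"hello")
assert request.headers["Host"] == "www.example.com"
assert request.headers["Content-Length"] == "5"

def body():  # a generator has no known length
    yield b"hello"

chunked = httpx.Request("POST", "https://www.example.com/upload", content=body())
assert chunked.headers["Transfer-Encoding"] == "chunked"
```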
- self.stream = ByteStream(self._content) - return self._content - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - url = str(self.url) - return f"<{class_name}({self.method!r}, {url!r})>" - - def __getstate__(self) -> dict[str, typing.Any]: - return { - name: value - for name, value in self.__dict__.items() - if name not in ["extensions", "stream"] - } - - def __setstate__(self, state: dict[str, typing.Any]) -> None: - for name, value in state.items(): - setattr(self, name, value) - self.extensions = {} - self.stream = UnattachedStream() - - -class Response: - def __init__( - self, - status_code: int, - *, - headers: HeaderTypes | None = None, - content: ResponseContent | None = None, - text: str | None = None, - html: str | None = None, - json: typing.Any = None, - stream: SyncByteStream | AsyncByteStream | None = None, - request: Request | None = None, - extensions: ResponseExtensions | None = None, - history: list[Response] | None = None, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - self.status_code = status_code - self.headers = Headers(headers) - - self._request: Request | None = request - - # When follow_redirects=False and a redirect is received, - # the client will set `response.next_request`. - self.next_request: Request | None = None - - self.extensions = {} if extensions is None else dict(extensions) - self.history = [] if history is None else list(history) - - self.is_closed = False - self.is_stream_consumed = False - - self.default_encoding = default_encoding - - if stream is None: - headers, stream = encode_response(content, text, html, json) - self._prepare(headers) - self.stream = stream - if isinstance(stream, ByteStream): - # Load the response body, except for streaming content. - self.read() - else: - # There's an important distinction between `Response(content=...)`, - # and `Response(stream=...)`. - # - # Using `content=...` implies automatically populated content headers, - # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include any content headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when creating response instances having received a stream - # from the transport API. - self.stream = stream - - self._num_bytes_downloaded = 0 - - def _prepare(self, default_headers: dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. - if key.lower() == "transfer-encoding" and "content-length" in self.headers: - continue - self.headers.setdefault(key, value) - - @property - def elapsed(self) -> datetime.timedelta: - """ - Returns the time taken for the complete request/response - cycle to complete. - """ - if not hasattr(self, "_elapsed"): - raise RuntimeError( - "'.elapsed' may only be accessed after the response " - "has been read or closed." - ) - return self._elapsed - - @elapsed.setter - def elapsed(self, elapsed: datetime.timedelta) -> None: - self._elapsed = elapsed - - @property - def request(self) -> Request: - """ - Returns the request instance associated to the current response. - """ - if self._request is None: - raise RuntimeError( - "The request instance has not been set on this response." 
- ) - return self._request - - @request.setter - def request(self, value: Request) -> None: - self._request = value - - @property - def http_version(self) -> str: - try: - http_version: bytes = self.extensions["http_version"] - except KeyError: - return "HTTP/1.1" - else: - return http_version.decode("ascii", errors="ignore") - - @property - def reason_phrase(self) -> str: - try: - reason_phrase: bytes = self.extensions["reason_phrase"] - except KeyError: - return codes.get_reason_phrase(self.status_code) - else: - return reason_phrase.decode("ascii", errors="ignore") - - @property - def url(self) -> URL: - """ - Returns the URL for which the request was made. - """ - return self.request.url - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise ResponseNotRead() - return self._content - - @property - def text(self) -> str: - if not hasattr(self, "_text"): - content = self.content - if not content: - self._text = "" - else: - decoder = TextDecoder(encoding=self.encoding or "utf-8") - self._text = "".join([decoder.decode(self.content), decoder.flush()]) - return self._text - - @property - def encoding(self) -> str | None: - """ - Return an encoding to use for decoding the byte content into text. - The priority for determining this is given by... - - * `.encoding = <>` has been set explicitly. - * The encoding as specified by the charset parameter in the Content-Type header. - * The encoding as determined by `default_encoding`, which may either be - a string like "utf-8" indicating the encoding to use, or may be a callable - which enables charset autodetection. - """ - if not hasattr(self, "_encoding"): - encoding = self.charset_encoding - if encoding is None or not _is_known_encoding(encoding): - if isinstance(self.default_encoding, str): - encoding = self.default_encoding - elif hasattr(self, "_content"): - encoding = self.default_encoding(self._content) - self._encoding = encoding or "utf-8" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - """ - Set the encoding to use for decoding the byte content into text. - - If the `text` attribute has been accessed, attempting to set the - encoding will throw a ValueError. - """ - if hasattr(self, "_text"): - raise ValueError( - "Setting encoding after `text` has been accessed is not allowed." - ) - self._encoding = value - - @property - def charset_encoding(self) -> str | None: - """ - Return the encoding, as specified by the Content-Type header. - """ - content_type = self.headers.get("Content-Type") - if content_type is None: - return None - - return _parse_content_type_charset(content_type) - - def _get_content_decoder(self) -> ContentDecoder: - """ - Returns a decoder instance which can be used to decode the raw byte - content, depending on the Content-Encoding used in the response. - """ - if not hasattr(self, "_decoder"): - decoders: list[ContentDecoder] = [] - values = self.headers.get_list("content-encoding", split_commas=True) - for value in values: - value = value.strip().lower() - try: - decoder_cls = SUPPORTED_DECODERS[value] - decoders.append(decoder_cls()) - except KeyError: - continue - - if len(decoders) == 1: - self._decoder = decoders[0] - elif len(decoders) > 1: - self._decoder = MultiDecoder(children=decoders) - else: - self._decoder = IdentityDecoder() - - return self._decoder - - @property - def is_informational(self) -> bool: - """ - A property which is `True` for 1xx status codes, `False` otherwise. 
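The encoding priority documented above is observable directly: an explicit charset in the Content-Type header wins, otherwise `default_encoding` applies, and `errors="replace"` in `TextDecoder` means undecodable bytes degrade to U+FFFD rather than raising:

```
import httpx

response = httpx.Response(
    200,
    headers={"Content-Type": "text/plain; charset=iso-8859-1"},
    content=b"caf\xe9",
)
assert response.encoding == "iso-8859-1"
assert response.text == "café"

fallback = httpx.Response(200, content=b"caf\xe9")  # no charset header
assert fallback.encoding == "utf-8"                 # the default_encoding
assert fallback.text == "caf\ufffd"                 # 0xe9 is not valid UTF-8
```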
- """ - return codes.is_informational(self.status_code) - - @property - def is_success(self) -> bool: - """ - A property which is `True` for 2xx status codes, `False` otherwise. - """ - return codes.is_success(self.status_code) - - @property - def is_redirect(self) -> bool: - """ - A property which is `True` for 3xx status codes, `False` otherwise. - - Note that not all responses with a 3xx status code indicate a URL redirect. - - Use `response.has_redirect_location` to determine responses with a properly - formed URL redirection. - """ - return codes.is_redirect(self.status_code) - - @property - def is_client_error(self) -> bool: - """ - A property which is `True` for 4xx status codes, `False` otherwise. - """ - return codes.is_client_error(self.status_code) - - @property - def is_server_error(self) -> bool: - """ - A property which is `True` for 5xx status codes, `False` otherwise. - """ - return codes.is_server_error(self.status_code) - - @property - def is_error(self) -> bool: - """ - A property which is `True` for 4xx and 5xx status codes, `False` otherwise. - """ - return codes.is_error(self.status_code) - - @property - def has_redirect_location(self) -> bool: - """ - Returns True for 3xx responses with a properly formed URL redirection, - `False` otherwise. - """ - return ( - self.status_code - in ( - # 301 (Cacheable redirect. Method may change to GET.) - codes.MOVED_PERMANENTLY, - # 302 (Uncacheable redirect. Method may change to GET.) - codes.FOUND, - # 303 (Client should make a GET or HEAD request.) - codes.SEE_OTHER, - # 307 (Equiv. 302, but retain method) - codes.TEMPORARY_REDIRECT, - # 308 (Equiv. 301, but retain method) - codes.PERMANENT_REDIRECT, - ) - and "Location" in self.headers - ) - - def raise_for_status(self) -> Response: - """ - Raise the `HTTPStatusError` if one occurred. - """ - request = self._request - if request is None: - raise RuntimeError( - "Cannot call `raise_for_status` as the request " - "instance has not been set on this response." 
- )
-
- if self.is_success:
- return self
-
- if self.has_redirect_location:
- message = (
- "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
- "Redirect location: '{0.headers[location]}'\n"
- "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
- )
- else:
- message = (
- "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
- "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
- )
-
- status_class = self.status_code // 100
- error_types = {
- 1: "Informational response",
- 3: "Redirect response",
- 4: "Client error",
- 5: "Server error",
- }
- error_type = error_types.get(status_class, "Invalid status code")
- message = message.format(self, error_type=error_type)
- raise HTTPStatusError(message, request=request, response=self)
-
- def json(self, **kwargs: typing.Any) -> typing.Any:
- return jsonlib.loads(self.content, **kwargs)
-
- @property
- def cookies(self) -> Cookies:
- if not hasattr(self, "_cookies"):
- self._cookies = Cookies()
- self._cookies.extract_cookies(self)
- return self._cookies
-
- @property
- def links(self) -> dict[str | None, dict[str, str]]:
- """
- Returns the parsed header links of the response, if any
- """
- header = self.headers.get("link")
- if header is None:
- return {}
-
- return {
- (link.get("rel") or link.get("url")): link
- for link in _parse_header_links(header)
- }
-
- @property
- def num_bytes_downloaded(self) -> int:
- return self._num_bytes_downloaded
-
- def __repr__(self) -> str:
- return f"<Response [{self.status_code} {self.reason_phrase}]>"
-
- def __getstate__(self) -> dict[str, typing.Any]:
- return {
- name: value
- for name, value in self.__dict__.items()
- if name not in ["extensions", "stream", "is_closed", "_decoder"]
- }
-
- def __setstate__(self, state: dict[str, typing.Any]) -> None:
- for name, value in state.items():
- setattr(self, name, value)
- self.is_closed = True
- self.extensions = {}
- self.stream = UnattachedStream()
-
- def read(self) -> bytes:
- """
- Read and return the response content.
- """
- if not hasattr(self, "_content"):
- self._content = b"".join(self.iter_bytes())
- return self._content
-
- def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
- """
- A byte-iterator over the decoded response content.
- This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
- """
- if hasattr(self, "_content"):
- chunk_size = len(self._content) if chunk_size is None else chunk_size
- for i in range(0, len(self._content), max(chunk_size, 1)):
- yield self._content[i : i + chunk_size]
- else:
- decoder = self._get_content_decoder()
- chunker = ByteChunker(chunk_size=chunk_size)
- with request_context(request=self._request):
- for raw_bytes in self.iter_raw():
- decoded = decoder.decode(raw_bytes)
- for chunk in chunker.decode(decoded):
- yield chunk
- decoded = decoder.flush()
- for chunk in chunker.decode(decoded):
- yield chunk # pragma: no cover
- for chunk in chunker.flush():
- yield chunk
-
- def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]:
- """
- A str-iterator over the decoded response content
- that handles both gzip, deflate, etc but also detects the content's
- string encoding.
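For reference, the status-class helpers and `raise_for_status` removed above behave as follows. A minimal sketch using only public `httpx` names; the URL is illustrative:

```python
import httpx

request = httpx.Request("GET", "https://example.com/missing")
response = httpx.Response(404, request=request)

print(response.is_client_error, response.is_error)  # True True
try:
    response.raise_for_status()
except httpx.HTTPStatusError as exc:
    # The message includes status, reason phrase, URL and an MDN link.
    print(exc.response.status_code)  # 404
```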
- """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - for byte_content in self.iter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - def iter_lines(self) -> typing.Iterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - for text in self.iter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call a sync iterator on an async stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - self.close() - - def close(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call an sync close on an async stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - self.stream.close() - - async def aread(self) -> bytes: - """ - Read and return the response content. - """ - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_bytes()]) - return self._content - - async def aiter_bytes( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the decoded response content. - This allows us to handle gzip, deflate, brotli, and zstd encoded responses. - """ - if hasattr(self, "_content"): - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), max(chunk_size, 1)): - yield self._content[i : i + chunk_size] - else: - decoder = self._get_content_decoder() - chunker = ByteChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for raw_bytes in self.aiter_raw(): - decoded = decoder.decode(raw_bytes) - for chunk in chunker.decode(decoded): - yield chunk - decoded = decoder.flush() - for chunk in chunker.decode(decoded): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - async def aiter_text( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[str]: - """ - A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. 
- """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for byte_content in self.aiter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - async def aiter_lines(self) -> typing.AsyncIterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - async for text in self.aiter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - async def aiter_raw( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async iterator on an sync stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - async for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - await self.aclose() - - async def aclose(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async close on an sync stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - await self.stream.aclose() - - -class Cookies(typing.MutableMapping[str, str]): - """ - HTTP Cookies, as a mutable mapping. - """ - - def __init__(self, cookies: CookieTypes | None = None) -> None: - if cookies is None or isinstance(cookies, dict): - self.jar = CookieJar() - if isinstance(cookies, dict): - for key, value in cookies.items(): - self.set(key, value) - elif isinstance(cookies, list): - self.jar = CookieJar() - for key, value in cookies: - self.set(key, value) - elif isinstance(cookies, Cookies): - self.jar = CookieJar() - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - else: - self.jar = cookies - - def extract_cookies(self, response: Response) -> None: - """ - Loads any cookies based on the response `Set-Cookie` headers. - """ - urllib_response = self._CookieCompatResponse(response) - urllib_request = self._CookieCompatRequest(response.request) - - self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore - - def set_cookie_header(self, request: Request) -> None: - """ - Sets an appropriate 'Cookie:' HTTP header on the `Request`. - """ - urllib_request = self._CookieCompatRequest(request) - self.jar.add_cookie_header(urllib_request) - - def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: - """ - Set a cookie value by name. May optionally include domain and path. 
- """ - kwargs = { - "version": 0, - "name": name, - "value": value, - "port": None, - "port_specified": False, - "domain": domain, - "domain_specified": bool(domain), - "domain_initial_dot": domain.startswith("."), - "path": path, - "path_specified": bool(path), - "secure": False, - "expires": None, - "discard": True, - "comment": None, - "comment_url": None, - "rest": {"HttpOnly": None}, - "rfc2109": False, - } - cookie = Cookie(**kwargs) # type: ignore - self.jar.set_cookie(cookie) - - def get( # type: ignore - self, - name: str, - default: str | None = None, - domain: str | None = None, - path: str | None = None, - ) -> str | None: - """ - Get a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to retrieve. - """ - value = None - for cookie in self.jar: - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - if value is not None: - message = f"Multiple cookies exist with name={name}" - raise CookieConflict(message) - value = cookie.value - - if value is None: - return default - return value - - def delete( - self, - name: str, - domain: str | None = None, - path: str | None = None, - ) -> None: - """ - Delete a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to delete. - """ - if domain is not None and path is not None: - return self.jar.clear(domain, path, name) - - remove = [ - cookie - for cookie in self.jar - if cookie.name == name - and (domain is None or cookie.domain == domain) - and (path is None or cookie.path == path) - ] - - for cookie in remove: - self.jar.clear(cookie.domain, cookie.path, cookie.name) - - def clear(self, domain: str | None = None, path: str | None = None) -> None: - """ - Delete all cookies. Optionally include a domain and path in - order to only delete a subset of all the cookies. - """ - args = [] - if domain is not None: - args.append(domain) - if path is not None: - assert domain is not None - args.append(path) - self.jar.clear(*args) - - def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore - cookies = Cookies(cookies) - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - - def __setitem__(self, name: str, value: str) -> None: - return self.set(name, value) - - def __getitem__(self, name: str) -> str: - value = self.get(name) - if value is None: - raise KeyError(name) - return value - - def __delitem__(self, name: str) -> None: - return self.delete(name) - - def __len__(self) -> int: - return len(self.jar) - - def __iter__(self) -> typing.Iterator[str]: - return (cookie.name for cookie in self.jar) - - def __bool__(self) -> bool: - for _ in self.jar: - return True - return False - - def __repr__(self) -> str: - cookies_repr = ", ".join( - [ - f"" - for cookie in self.jar - ] - ) - - return f"" - - class _CookieCompatRequest(urllib.request.Request): - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. - """ - - def __init__(self, request: Request) -> None: - super().__init__( - url=str(request.url), - headers=dict(request.headers), - method=request.method, - ) - self.request = request - - def add_unredirected_header(self, key: str, value: str) -> None: - super().add_unredirected_header(key, value) - self.request.headers[key] = value - - class _CookieCompatResponse: - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. 
- """ - - def __init__(self, response: Response) -> None: - self.response = response - - def info(self) -> email.message.Message: - info = email.message.Message() - for key, value in self.response.headers.multi_items(): - # Note that setting `info[key]` here is an "append" operation, - # not a "replace" operation. - # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ - info[key] = value - return info diff --git a/venv/lib/python3.12/site-packages/httpx/_multipart.py b/venv/lib/python3.12/site-packages/httpx/_multipart.py deleted file mode 100644 index b4761af9..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_multipart.py +++ /dev/null @@ -1,300 +0,0 @@ -from __future__ import annotations - -import io -import mimetypes -import os -import re -import typing -from pathlib import Path - -from ._types import ( - AsyncByteStream, - FileContent, - FileTypes, - RequestData, - RequestFiles, - SyncByteStream, -) -from ._utils import ( - peek_filelike_length, - primitive_value_to_str, - to_bytes, -) - -_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} -_HTML5_FORM_ENCODING_REPLACEMENTS.update( - {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} -) -_HTML5_FORM_ENCODING_RE = re.compile( - r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) -) - - -def _format_form_param(name: str, value: str) -> bytes: - """ - Encode a name/value pair within a multipart form. - """ - - def replacer(match: typing.Match[str]) -> str: - return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] - - value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) - return f'{name}="{value}"'.encode() - - -def _guess_content_type(filename: str | None) -> str | None: - """ - Guesses the mimetype based on a filename. Defaults to `application/octet-stream`. - - Returns `None` if `filename` is `None` or empty. - """ - if filename: - return mimetypes.guess_type(filename)[0] or "application/octet-stream" - return None - - -def get_multipart_boundary_from_content_type( - content_type: bytes | None, -) -> bytes | None: - if not content_type or not content_type.startswith(b"multipart/form-data"): - return None - # parse boundary according to - # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 - if b";" in content_type: - for section in content_type.split(b";"): - if section.strip().lower().startswith(b"boundary="): - return section.strip()[len(b"boundary=") :].strip(b'"') - return None - - -class DataField: - """ - A single form field item, within a multipart form field. - """ - - def __init__(self, name: str, value: str | bytes | int | float | None) -> None: - if not isinstance(name, str): - raise TypeError( - f"Invalid type for name. Expected str, got {type(name)}: {name!r}" - ) - if value is not None and not isinstance(value, (str, bytes, int, float)): - raise TypeError( - "Invalid type for value. 
Expected primitive type," - f" got {type(value)}: {value!r}" - ) - self.name = name - self.value: str | bytes = ( - value if isinstance(value, bytes) else primitive_value_to_str(value) - ) - - def render_headers(self) -> bytes: - if not hasattr(self, "_headers"): - name = _format_form_param("name", self.name) - self._headers = b"".join( - [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] - ) - - return self._headers - - def render_data(self) -> bytes: - if not hasattr(self, "_data"): - self._data = to_bytes(self.value) - - return self._data - - def get_length(self) -> int: - headers = self.render_headers() - data = self.render_data() - return len(headers) + len(data) - - def render(self) -> typing.Iterator[bytes]: - yield self.render_headers() - yield self.render_data() - - -class FileField: - """ - A single file field item, within a multipart form field. - """ - - CHUNK_SIZE = 64 * 1024 - - def __init__(self, name: str, value: FileTypes) -> None: - self.name = name - - fileobj: FileContent - - headers: dict[str, str] = {} - content_type: str | None = None - - # This large tuple based API largely mirror's requests' API - # It would be good to think of better APIs for this that we could - # include in httpx 2.0 since variable length tuples(especially of 4 elements) - # are quite unwieldly - if isinstance(value, tuple): - if len(value) == 2: - # neither the 3rd parameter (content_type) nor the 4th (headers) - # was included - filename, fileobj = value - elif len(value) == 3: - filename, fileobj, content_type = value - else: - # all 4 parameters included - filename, fileobj, content_type, headers = value # type: ignore - else: - filename = Path(str(getattr(value, "name", "upload"))).name - fileobj = value - - if content_type is None: - content_type = _guess_content_type(filename) - - has_content_type_header = any("content-type" in key.lower() for key in headers) - if content_type is not None and not has_content_type_header: - # note that unlike requests, we ignore the content_type provided in the 3rd - # tuple element if it is also included in the headers requests does - # the opposite (it overwrites the headerwith the 3rd tuple element) - headers["Content-Type"] = content_type - - if isinstance(fileobj, io.StringIO): - raise TypeError( - "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." - ) - if isinstance(fileobj, io.TextIOBase): - raise TypeError( - "Multipart file uploads must be opened in binary mode, not text mode." - ) - - self.filename = filename - self.file = fileobj - self.headers = headers - - def get_length(self) -> int | None: - headers = self.render_headers() - - if isinstance(self.file, (str, bytes)): - return len(headers) + len(to_bytes(self.file)) - - file_length = peek_filelike_length(self.file) - - # If we can't determine the filesize without reading it into memory, - # then return `None` here, to indicate an unknown file length. 
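The 2-, 3- and 4-tuple forms unpacked above mirror the shapes accepted by the public `files=...` argument. A sketch of all four shapes; field names and the URL are illustrative:

```python
import io

import httpx

files = {
    "plain": io.BytesIO(b"raw bytes"),                      # file object only
    "named": ("report.csv", io.BytesIO(b"a,b\n1,2\n")),     # (filename, file)
    "typed": ("doc.txt", io.BytesIO(b"hi"), "text/plain"),  # + content_type
    "full": ("x.bin", b"\x00", None, {"X-Extra": "1"}),     # + headers
}
request = httpx.Request("POST", "https://example.com/upload", files=files)
print(request.headers["Content-Type"])  # multipart/form-data; boundary=...
```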
- if file_length is None: - return None - - return len(headers) + file_length - - def render_headers(self) -> bytes: - if not hasattr(self, "_headers"): - parts = [ - b"Content-Disposition: form-data; ", - _format_form_param("name", self.name), - ] - if self.filename: - filename = _format_form_param("filename", self.filename) - parts.extend([b"; ", filename]) - for header_name, header_value in self.headers.items(): - key, val = f"\r\n{header_name}: ".encode(), header_value.encode() - parts.extend([key, val]) - parts.append(b"\r\n\r\n") - self._headers = b"".join(parts) - - return self._headers - - def render_data(self) -> typing.Iterator[bytes]: - if isinstance(self.file, (str, bytes)): - yield to_bytes(self.file) - return - - if hasattr(self.file, "seek"): - try: - self.file.seek(0) - except io.UnsupportedOperation: - pass - - chunk = self.file.read(self.CHUNK_SIZE) - while chunk: - yield to_bytes(chunk) - chunk = self.file.read(self.CHUNK_SIZE) - - def render(self) -> typing.Iterator[bytes]: - yield self.render_headers() - yield from self.render_data() - - -class MultipartStream(SyncByteStream, AsyncByteStream): - """ - Request content as streaming multipart encoded form data. - """ - - def __init__( - self, - data: RequestData, - files: RequestFiles, - boundary: bytes | None = None, - ) -> None: - if boundary is None: - boundary = os.urandom(16).hex().encode("ascii") - - self.boundary = boundary - self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( - "ascii" - ) - self.fields = list(self._iter_fields(data, files)) - - def _iter_fields( - self, data: RequestData, files: RequestFiles - ) -> typing.Iterator[FileField | DataField]: - for name, value in data.items(): - if isinstance(value, (tuple, list)): - for item in value: - yield DataField(name=name, value=item) - else: - yield DataField(name=name, value=value) - - file_items = files.items() if isinstance(files, typing.Mapping) else files - for name, value in file_items: - yield FileField(name=name, value=value) - - def iter_chunks(self) -> typing.Iterator[bytes]: - for field in self.fields: - yield b"--%s\r\n" % self.boundary - yield from field.render() - yield b"\r\n" - yield b"--%s--\r\n" % self.boundary - - def get_content_length(self) -> int | None: - """ - Return the length of the multipart encoded content, or `None` if - any of the files have a length that cannot be determined upfront. - """ - boundary_length = len(self.boundary) - length = 0 - - for field in self.fields: - field_length = field.get_length() - if field_length is None: - return None - - length += 2 + boundary_length + 2 # b"--{boundary}\r\n" - length += field_length - length += 2 # b"\r\n" - - length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" - return length - - # Content stream interface. 
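The `get_length` computation above feeds `MultipartStream.get_content_length`, which sums, for every field, the `--boundary\r\n` preamble (2 + boundary length + 2 bytes), the rendered headers and data, and a trailing `\r\n`, plus the closing `--boundary--\r\n` terminator. A quick check that the advertised length matches the rendered body, with illustrative field names:

```python
import httpx

request = httpx.Request(
    "POST",
    "https://example.com/upload",                    # illustrative URL
    data={"k": "v"},                                 # one DataField
    files={"f": ("a.txt", b"hello", "text/plain")},  # one FileField
)
body = request.read()
assert int(request.headers["Content-Length"]) == len(body)
```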
- - def get_headers(self) -> dict[str, str]: - content_length = self.get_content_length() - content_type = self.content_type - if content_length is None: - return {"Transfer-Encoding": "chunked", "Content-Type": content_type} - return {"Content-Length": str(content_length), "Content-Type": content_type} - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk diff --git a/venv/lib/python3.12/site-packages/httpx/_status_codes.py b/venv/lib/python3.12/site-packages/httpx/_status_codes.py deleted file mode 100644 index 133a6231..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_status_codes.py +++ /dev/null @@ -1,162 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum - -__all__ = ["codes"] - - -class codes(IntEnum): - """HTTP status codes and reason phrases - - Status codes from the following RFCs are all observed: - - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) - * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) - * RFC 7725: An HTTP Status Code to Report Legal Obstacles - * RFC 8297: An HTTP Status Code for Indicating Hints - * RFC 8470: Using Early Data in HTTP - """ - - def __new__(cls, value: int, phrase: str = "") -> codes: - obj = int.__new__(cls, value) - obj._value_ = value - - obj.phrase = phrase # type: ignore[attr-defined] - return obj - - def __str__(self) -> str: - return str(self.value) - - @classmethod - def get_reason_phrase(cls, value: int) -> str: - try: - return codes(value).phrase # type: ignore - except ValueError: - return "" - - @classmethod - def is_informational(cls, value: int) -> bool: - """ - Returns `True` for 1xx status codes, `False` otherwise. - """ - return 100 <= value <= 199 - - @classmethod - def is_success(cls, value: int) -> bool: - """ - Returns `True` for 2xx status codes, `False` otherwise. - """ - return 200 <= value <= 299 - - @classmethod - def is_redirect(cls, value: int) -> bool: - """ - Returns `True` for 3xx status codes, `False` otherwise. - """ - return 300 <= value <= 399 - - @classmethod - def is_client_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx status codes, `False` otherwise. - """ - return 400 <= value <= 499 - - @classmethod - def is_server_error(cls, value: int) -> bool: - """ - Returns `True` for 5xx status codes, `False` otherwise. - """ - return 500 <= value <= 599 - - @classmethod - def is_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
- """ - return 400 <= value <= 599 - - # informational - CONTINUE = 100, "Continue" - SWITCHING_PROTOCOLS = 101, "Switching Protocols" - PROCESSING = 102, "Processing" - EARLY_HINTS = 103, "Early Hints" - - # success - OK = 200, "OK" - CREATED = 201, "Created" - ACCEPTED = 202, "Accepted" - NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" - NO_CONTENT = 204, "No Content" - RESET_CONTENT = 205, "Reset Content" - PARTIAL_CONTENT = 206, "Partial Content" - MULTI_STATUS = 207, "Multi-Status" - ALREADY_REPORTED = 208, "Already Reported" - IM_USED = 226, "IM Used" - - # redirection - MULTIPLE_CHOICES = 300, "Multiple Choices" - MOVED_PERMANENTLY = 301, "Moved Permanently" - FOUND = 302, "Found" - SEE_OTHER = 303, "See Other" - NOT_MODIFIED = 304, "Not Modified" - USE_PROXY = 305, "Use Proxy" - TEMPORARY_REDIRECT = 307, "Temporary Redirect" - PERMANENT_REDIRECT = 308, "Permanent Redirect" - - # client error - BAD_REQUEST = 400, "Bad Request" - UNAUTHORIZED = 401, "Unauthorized" - PAYMENT_REQUIRED = 402, "Payment Required" - FORBIDDEN = 403, "Forbidden" - NOT_FOUND = 404, "Not Found" - METHOD_NOT_ALLOWED = 405, "Method Not Allowed" - NOT_ACCEPTABLE = 406, "Not Acceptable" - PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" - REQUEST_TIMEOUT = 408, "Request Timeout" - CONFLICT = 409, "Conflict" - GONE = 410, "Gone" - LENGTH_REQUIRED = 411, "Length Required" - PRECONDITION_FAILED = 412, "Precondition Failed" - REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" - REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" - UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" - REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" - EXPECTATION_FAILED = 417, "Expectation Failed" - IM_A_TEAPOT = 418, "I'm a teapot" - MISDIRECTED_REQUEST = 421, "Misdirected Request" - UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" - LOCKED = 423, "Locked" - FAILED_DEPENDENCY = 424, "Failed Dependency" - TOO_EARLY = 425, "Too Early" - UPGRADE_REQUIRED = 426, "Upgrade Required" - PRECONDITION_REQUIRED = 428, "Precondition Required" - TOO_MANY_REQUESTS = 429, "Too Many Requests" - REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" - UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" - - # server errors - INTERNAL_SERVER_ERROR = 500, "Internal Server Error" - NOT_IMPLEMENTED = 501, "Not Implemented" - BAD_GATEWAY = 502, "Bad Gateway" - SERVICE_UNAVAILABLE = 503, "Service Unavailable" - GATEWAY_TIMEOUT = 504, "Gateway Timeout" - HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" - VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" - INSUFFICIENT_STORAGE = 507, "Insufficient Storage" - LOOP_DETECTED = 508, "Loop Detected" - NOT_EXTENDED = 510, "Not Extended" - NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" - - -# Include lower-case styles for `requests` compatibility. 
-for code in codes: - setattr(codes, code._name_.lower(), int(code)) diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py deleted file mode 100644 index 7a321053..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .asgi import * -from .base import * -from .default import * -from .mock import * -from .wsgi import * - -__all__ = [ - "ASGITransport", - "AsyncBaseTransport", - "BaseTransport", - "AsyncHTTPTransport", - "HTTPTransport", - "MockTransport", - "WSGITransport", -] diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 19536266..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc deleted file mode 100644 index fe68c4ba..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 345f2a34..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc deleted file mode 100644 index 50331fc1..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc deleted file mode 100644 index c2e9e8e1..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc deleted file mode 100644 index 35900107..00000000 Binary files a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py deleted file mode 100644 index 2bc4efae..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py +++ /dev/null @@ -1,187 +0,0 @@ -from __future__ import annotations - -import typing - -from .._models import Request, Response -from .._types import AsyncByteStream -from .base import AsyncBaseTransport - -if typing.TYPE_CHECKING: # pragma: no cover - import asyncio - - import trio - - Event = typing.Union[asyncio.Event, trio.Event] - - -_Message = typing.MutableMapping[str, typing.Any] -_Receive = typing.Callable[[], typing.Awaitable[_Message]] -_Send = typing.Callable[ - [typing.MutableMapping[str, 
typing.Any]], typing.Awaitable[None] -] -_ASGIApp = typing.Callable[ - [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None] -] - -__all__ = ["ASGITransport"] - - -def is_running_trio() -> bool: - try: - # sniffio is a dependency of trio. - - # See https://github.com/python-trio/trio/issues/2802 - import sniffio - - if sniffio.current_async_library() == "trio": - return True - except ImportError: # pragma: nocover - pass - - return False - - -def create_event() -> Event: - if is_running_trio(): - import trio - - return trio.Event() - - import asyncio - - return asyncio.Event() - - -class ASGIResponseStream(AsyncByteStream): - def __init__(self, body: list[bytes]) -> None: - self._body = body - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - yield b"".join(self._body) - - -class ASGITransport(AsyncBaseTransport): - """ - A custom AsyncTransport that handles sending requests directly to an ASGI app. - - ```python - transport = httpx.ASGITransport( - app=app, - root_path="/submount", - client=("1.2.3.4", 123) - ) - client = httpx.AsyncClient(transport=transport) - ``` - - Arguments: - - * `app` - The ASGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Default to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `root_path` - The root path on which the ASGI application should be mounted. - * `client` - A two-tuple indicating the client IP and port of incoming requests. - ``` - """ - - def __init__( - self, - app: _ASGIApp, - raise_app_exceptions: bool = True, - root_path: str = "", - client: tuple[str, int] = ("127.0.0.1", 123), - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.root_path = root_path - self.client = client - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - - # ASGI scope. - scope = { - "type": "http", - "asgi": {"version": "3.0"}, - "http_version": "1.1", - "method": request.method, - "headers": [(k.lower(), v) for (k, v) in request.headers.raw], - "scheme": request.url.scheme, - "path": request.url.path, - "raw_path": request.url.raw_path.split(b"?")[0], - "query_string": request.url.query, - "server": (request.url.host, request.url.port), - "client": self.client, - "root_path": self.root_path, - } - - # Request. - request_body_chunks = request.stream.__aiter__() - request_complete = False - - # Response. - status_code = None - response_headers = None - body_parts = [] - response_started = False - response_complete = create_event() - - # ASGI callables. 
- - async def receive() -> dict[str, typing.Any]: - nonlocal request_complete - - if request_complete: - await response_complete.wait() - return {"type": "http.disconnect"} - - try: - body = await request_body_chunks.__anext__() - except StopAsyncIteration: - request_complete = True - return {"type": "http.request", "body": b"", "more_body": False} - return {"type": "http.request", "body": body, "more_body": True} - - async def send(message: typing.MutableMapping[str, typing.Any]) -> None: - nonlocal status_code, response_headers, response_started - - if message["type"] == "http.response.start": - assert not response_started - - status_code = message["status"] - response_headers = message.get("headers", []) - response_started = True - - elif message["type"] == "http.response.body": - assert not response_complete.is_set() - body = message.get("body", b"") - more_body = message.get("more_body", False) - - if body and request.method != "HEAD": - body_parts.append(body) - - if not more_body: - response_complete.set() - - try: - await self.app(scope, receive, send) - except Exception: # noqa: PIE-786 - if self.raise_app_exceptions: - raise - - response_complete.set() - if status_code is None: - status_code = 500 - if response_headers is None: - response_headers = {} - - assert response_complete.is_set() - assert status_code is not None - assert response_headers is not None - - stream = ASGIResponseStream(body_parts) - - return Response(status_code, headers=response_headers, stream=stream) diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/base.py b/venv/lib/python3.12/site-packages/httpx/_transports/base.py deleted file mode 100644 index 66fd99d7..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/base.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import annotations - -import typing -from types import TracebackType - -from .._models import Request, Response - -T = typing.TypeVar("T", bound="BaseTransport") -A = typing.TypeVar("A", bound="AsyncBaseTransport") - -__all__ = ["AsyncBaseTransport", "BaseTransport"] - - -class BaseTransport: - def __enter__(self: T) -> T: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self.close() - - def handle_request(self, request: Request) -> Response: - """ - Send a single HTTP request and return a response. - - Developers shouldn't typically ever need to call into this API directly, - since the Client class provides all the higher level user-facing API - niceties. - - In order to properly release any network resources, the response - stream should *either* be consumed immediately, with a call to - `response.stream.read()`, or else the `handle_request` call should - be followed with a try/finally block to ensuring the stream is - always closed. - - Example usage: - - with httpx.HTTPTransport() as transport: - req = httpx.Request( - method=b"GET", - url=(b"https", b"www.example.com", 443, b"/"), - headers=[(b"Host", b"www.example.com")], - ) - resp = transport.handle_request(req) - body = resp.stream.read() - print(resp.status_code, resp.headers, body) - - - Takes a `Request` instance as the only argument. - - Returns a `Response` instance. - """ - raise NotImplementedError( - "The 'handle_request' method must be implemented." 
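Putting the ASGI pieces above together: `receive` replays the request body chunk by chunk, and `send` collects the status, headers and body parts. A runnable sketch with a hypothetical minimal app (`testserver` is an arbitrary base URL, since no network is used):

```python
import asyncio

import httpx


async def app(scope, receive, send):
    # Hypothetical minimal ASGI app: always respond 200 "hello".
    assert scope["type"] == "http"
    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(b"content-type", b"text/plain")],
    })
    await send({"type": "http.response.body", "body": b"hello"})


async def main():
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(
        transport=transport, base_url="http://testserver"
    ) as client:
        response = await client.get("/")
        print(response.status_code, response.text)  # 200 hello


asyncio.run(main())
```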
- ) # pragma: no cover - - def close(self) -> None: - pass - - -class AsyncBaseTransport: - async def __aenter__(self: A) -> A: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - await self.aclose() - - async def handle_async_request( - self, - request: Request, - ) -> Response: - raise NotImplementedError( - "The 'handle_async_request' method must be implemented." - ) # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/default.py b/venv/lib/python3.12/site-packages/httpx/_transports/default.py deleted file mode 100644 index d5aa05ff..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/default.py +++ /dev/null @@ -1,406 +0,0 @@ -""" -Custom transports, with nicely configured defaults. - -The following additional keyword arguments are currently supported by httpcore... - -* uds: str -* local_address: str -* retries: int - -Example usages... - -# Disable HTTP/2 on a single specific domain. -mounts = { - "all://": httpx.HTTPTransport(http2=True), - "all://*example.org": httpx.HTTPTransport() -} - -# Using advanced httpcore configuration, with connection retries. -transport = httpx.HTTPTransport(retries=1) -client = httpx.Client(transport=transport) - -# Using advanced httpcore configuration, with unix domain sockets. -transport = httpx.HTTPTransport(uds="socket.uds") -client = httpx.Client(transport=transport) -""" - -from __future__ import annotations - -import contextlib -import typing -from types import TracebackType - -if typing.TYPE_CHECKING: - import ssl # pragma: no cover - - import httpx # pragma: no cover - -from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context -from .._exceptions import ( - ConnectError, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from .._models import Request, Response -from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream -from .._urls import URL -from .base import AsyncBaseTransport, BaseTransport - -T = typing.TypeVar("T", bound="HTTPTransport") -A = typing.TypeVar("A", bound="AsyncHTTPTransport") - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - -__all__ = ["AsyncHTTPTransport", "HTTPTransport"] - -HTTPCORE_EXC_MAP: dict[type[Exception], type[httpx.HTTPError]] = {} - - -def _load_httpcore_exceptions() -> dict[type[Exception], type[httpx.HTTPError]]: - import httpcore - - return { - httpcore.TimeoutException: TimeoutException, - httpcore.ConnectTimeout: ConnectTimeout, - httpcore.ReadTimeout: ReadTimeout, - httpcore.WriteTimeout: WriteTimeout, - httpcore.PoolTimeout: PoolTimeout, - httpcore.NetworkError: NetworkError, - httpcore.ConnectError: ConnectError, - httpcore.ReadError: ReadError, - httpcore.WriteError: WriteError, - httpcore.ProxyError: ProxyError, - httpcore.UnsupportedProtocol: UnsupportedProtocol, - httpcore.ProtocolError: ProtocolError, - httpcore.LocalProtocolError: LocalProtocolError, - httpcore.RemoteProtocolError: RemoteProtocolError, - } - - -@contextlib.contextmanager -def map_httpcore_exceptions() -> typing.Iterator[None]: - global HTTPCORE_EXC_MAP - if len(HTTPCORE_EXC_MAP) == 0: 
- HTTPCORE_EXC_MAP = _load_httpcore_exceptions() - try: - yield - except Exception as exc: - mapped_exc = None - - for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): - if not isinstance(exc, from_exc): - continue - # We want to map to the most specific exception we can find. - # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to - # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. - if mapped_exc is None or issubclass(to_exc, mapped_exc): - mapped_exc = to_exc - - if mapped_exc is None: # pragma: no cover - raise - - message = str(exc) - raise mapped_exc(message) from exc - - -class ResponseStream(SyncByteStream): - def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: - self._httpcore_stream = httpcore_stream - - def __iter__(self) -> typing.Iterator[bytes]: - with map_httpcore_exceptions(): - for part in self._httpcore_stream: - yield part - - def close(self) -> None: - if hasattr(self._httpcore_stream, "close"): - self._httpcore_stream.close() - - -class HTTPTransport(BaseTransport): - def __init__( - self, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - proxy: ProxyTypes | None = None, - uds: str | None = None, - local_address: str | None = None, - retries: int = 0, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - import httpcore - - proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - - if proxy is None: - self._pool = httpcore.ConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.HTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - ssl_context=ssl_context, - proxy_ssl_context=proxy.ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("socks5", "socks5h"): - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." - ) from None - - self._pool = httpcore.SOCKSProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - ) - else: # pragma: no cover - raise ValueError( - "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," - f" but got {proxy.url.scheme!r}." - ) - - def __enter__(self: T) -> T: # Use generics for subclass support. 
- self._pool.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - with map_httpcore_exceptions(): - self._pool.__exit__(exc_type, exc_value, traceback) - - def handle_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, SyncByteStream) - import httpcore - - req = httpcore.Request( - method=request.method, - url=httpcore.URL( - scheme=request.url.raw_scheme, - host=request.url.raw_host, - port=request.url.port, - target=request.url.raw_path, - ), - headers=request.headers.raw, - content=request.stream, - extensions=request.extensions, - ) - with map_httpcore_exceptions(): - resp = self._pool.handle_request(req) - - assert isinstance(resp.stream, typing.Iterable) - - return Response( - status_code=resp.status, - headers=resp.headers, - stream=ResponseStream(resp.stream), - extensions=resp.extensions, - ) - - def close(self) -> None: - self._pool.close() - - -class AsyncResponseStream(AsyncByteStream): - def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: - self._httpcore_stream = httpcore_stream - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - with map_httpcore_exceptions(): - async for part in self._httpcore_stream: - yield part - - async def aclose(self) -> None: - if hasattr(self._httpcore_stream, "aclose"): - await self._httpcore_stream.aclose() - - -class AsyncHTTPTransport(AsyncBaseTransport): - def __init__( - self, - verify: ssl.SSLContext | str | bool = True, - cert: CertTypes | None = None, - trust_env: bool = True, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - proxy: ProxyTypes | None = None, - uds: str | None = None, - local_address: str | None = None, - retries: int = 0, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - import httpcore - - proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - - if proxy is None: - self._pool = httpcore.AsyncConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.AsyncHTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - proxy_ssl_context=proxy.ssl_context, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("socks5", "socks5h"): - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." 
- ) from None - - self._pool = httpcore.AsyncSOCKSProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - ) - else: # pragma: no cover - raise ValueError( - "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," - " but got {proxy.url.scheme!r}." - ) - - async def __aenter__(self: A) -> A: # Use generics for subclass support. - await self._pool.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - with map_httpcore_exceptions(): - await self._pool.__aexit__(exc_type, exc_value, traceback) - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - import httpcore - - req = httpcore.Request( - method=request.method, - url=httpcore.URL( - scheme=request.url.raw_scheme, - host=request.url.raw_host, - port=request.url.port, - target=request.url.raw_path, - ), - headers=request.headers.raw, - content=request.stream, - extensions=request.extensions, - ) - with map_httpcore_exceptions(): - resp = await self._pool.handle_async_request(req) - - assert isinstance(resp.stream, typing.AsyncIterable) - - return Response( - status_code=resp.status, - headers=resp.headers, - stream=AsyncResponseStream(resp.stream), - extensions=resp.extensions, - ) - - async def aclose(self) -> None: - await self._pool.aclose() diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/mock.py b/venv/lib/python3.12/site-packages/httpx/_transports/mock.py deleted file mode 100644 index 8c418f59..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/mock.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import annotations - -import typing - -from .._models import Request, Response -from .base import AsyncBaseTransport, BaseTransport - -SyncHandler = typing.Callable[[Request], Response] -AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] - - -__all__ = ["MockTransport"] - - -class MockTransport(AsyncBaseTransport, BaseTransport): - def __init__(self, handler: SyncHandler | AsyncHandler) -> None: - self.handler = handler - - def handle_request( - self, - request: Request, - ) -> Response: - request.read() - response = self.handler(request) - if not isinstance(response, Response): # pragma: no cover - raise TypeError("Cannot use an async handler in a sync Client") - return response - - async def handle_async_request( - self, - request: Request, - ) -> Response: - await request.aread() - response = self.handler(request) - - # Allow handler to *optionally* be an `async` function. - # If it is, then the `response` variable need to be awaited to actually - # return the result. 
- - if not isinstance(response, Response): - response = await response - - return response diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py deleted file mode 100644 index 8592ffe0..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py +++ /dev/null @@ -1,149 +0,0 @@ -from __future__ import annotations - -import io -import itertools -import sys -import typing - -from .._models import Request, Response -from .._types import SyncByteStream -from .base import BaseTransport - -if typing.TYPE_CHECKING: - from _typeshed import OptExcInfo # pragma: no cover - from _typeshed.wsgi import WSGIApplication # pragma: no cover - -_T = typing.TypeVar("_T") - - -__all__ = ["WSGITransport"] - - -def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: - body = iter(body) - for chunk in body: - if chunk: - return itertools.chain([chunk], body) - return [] - - -class WSGIByteStream(SyncByteStream): - def __init__(self, result: typing.Iterable[bytes]) -> None: - self._close = getattr(result, "close", None) - self._result = _skip_leading_empty_chunks(result) - - def __iter__(self) -> typing.Iterator[bytes]: - for part in self._result: - yield part - - def close(self) -> None: - if self._close is not None: - self._close() - - -class WSGITransport(BaseTransport): - """ - A custom transport that handles sending requests directly to an WSGI app. - The simplest way to use this functionality is to use the `app` argument. - - ``` - client = httpx.Client(app=app) - ``` - - Alternatively, you can setup the transport instance explicitly. - This allows you to include any additional configuration arguments specific - to the WSGITransport class: - - ``` - transport = httpx.WSGITransport( - app=app, - script_name="/submount", - remote_addr="1.2.3.4" - ) - client = httpx.Client(transport=transport) - ``` - - Arguments: - - * `app` - The WSGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Default to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `script_name` - The root path on which the WSGI application should be mounted. - * `remote_addr` - A string indicating the client IP of incoming requests. 
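Stepping back to `MockTransport` above: it accepts either a sync or an async handler, so the same canned responses serve both client flavours. A sketch with a hand-rolled sync handler; the routes and host are made up:

```python
import httpx


def handler(request: httpx.Request) -> httpx.Response:
    # Route-aware canned responses, no network involved.
    if request.url.path == "/ping":
        return httpx.Response(200, json={"pong": True})
    return httpx.Response(404)


client = httpx.Client(transport=httpx.MockTransport(handler))
print(client.get("https://example.com/ping").json())        # {'pong': True}
print(client.get("https://example.com/other").status_code)  # 404
```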
- ``` - """ - - def __init__( - self, - app: WSGIApplication, - raise_app_exceptions: bool = True, - script_name: str = "", - remote_addr: str = "127.0.0.1", - wsgi_errors: typing.TextIO | None = None, - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.script_name = script_name - self.remote_addr = remote_addr - self.wsgi_errors = wsgi_errors - - def handle_request(self, request: Request) -> Response: - request.read() - wsgi_input = io.BytesIO(request.content) - - port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] - environ = { - "wsgi.version": (1, 0), - "wsgi.url_scheme": request.url.scheme, - "wsgi.input": wsgi_input, - "wsgi.errors": self.wsgi_errors or sys.stderr, - "wsgi.multithread": True, - "wsgi.multiprocess": False, - "wsgi.run_once": False, - "REQUEST_METHOD": request.method, - "SCRIPT_NAME": self.script_name, - "PATH_INFO": request.url.path, - "QUERY_STRING": request.url.query.decode("ascii"), - "SERVER_NAME": request.url.host, - "SERVER_PORT": str(port), - "SERVER_PROTOCOL": "HTTP/1.1", - "REMOTE_ADDR": self.remote_addr, - } - for header_key, header_value in request.headers.raw: - key = header_key.decode("ascii").upper().replace("-", "_") - if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): - key = "HTTP_" + key - environ[key] = header_value.decode("ascii") - - seen_status = None - seen_response_headers = None - seen_exc_info = None - - def start_response( - status: str, - response_headers: list[tuple[str, str]], - exc_info: OptExcInfo | None = None, - ) -> typing.Callable[[bytes], typing.Any]: - nonlocal seen_status, seen_response_headers, seen_exc_info - seen_status = status - seen_response_headers = response_headers - seen_exc_info = exc_info - return lambda _: None - - result = self.app(environ, start_response) - - stream = WSGIByteStream(result) - - assert seen_status is not None - assert seen_response_headers is not None - if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: - raise seen_exc_info[1] - - status_code = int(seen_status.split()[0]) - headers = [ - (key.encode("ascii"), value.encode("ascii")) - for key, value in seen_response_headers - ] - - return Response(status_code, headers=headers, stream=stream) diff --git a/venv/lib/python3.12/site-packages/httpx/_types.py b/venv/lib/python3.12/site-packages/httpx/_types.py deleted file mode 100644 index 704dfdff..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_types.py +++ /dev/null @@ -1,114 +0,0 @@ -""" -Type definitions for type checking purposes. 
-""" - -from http.cookiejar import CookieJar -from typing import ( - IO, - TYPE_CHECKING, - Any, - AsyncIterable, - AsyncIterator, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Union, -) - -if TYPE_CHECKING: # pragma: no cover - from ._auth import Auth # noqa: F401 - from ._config import Proxy, Timeout # noqa: F401 - from ._models import Cookies, Headers, Request # noqa: F401 - from ._urls import URL, QueryParams # noqa: F401 - - -PrimitiveData = Optional[Union[str, int, float, bool]] - -URLTypes = Union["URL", str] - -QueryParamTypes = Union[ - "QueryParams", - Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], - List[Tuple[str, PrimitiveData]], - Tuple[Tuple[str, PrimitiveData], ...], - str, - bytes, -] - -HeaderTypes = Union[ - "Headers", - Mapping[str, str], - Mapping[bytes, bytes], - Sequence[Tuple[str, str]], - Sequence[Tuple[bytes, bytes]], -] - -CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] - -TimeoutTypes = Union[ - Optional[float], - Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], - "Timeout", -] -ProxyTypes = Union["URL", str, "Proxy"] -CertTypes = Union[str, Tuple[str, str], Tuple[str, str, str]] - -AuthTypes = Union[ - Tuple[Union[str, bytes], Union[str, bytes]], - Callable[["Request"], "Request"], - "Auth", -] - -RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseExtensions = Mapping[str, Any] - -RequestData = Mapping[str, Any] - -FileContent = Union[IO[bytes], bytes, str] -FileTypes = Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - Tuple[Optional[str], FileContent], - # (filename, file (or bytes), content_type) - Tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] - -RequestExtensions = Mapping[str, Any] - -__all__ = ["AsyncByteStream", "SyncByteStream"] - - -class SyncByteStream: - def __iter__(self) -> Iterator[bytes]: - raise NotImplementedError( - "The '__iter__' method must be implemented." - ) # pragma: no cover - yield b"" # pragma: no cover - - def close(self) -> None: - """ - Subclasses can override this method to release any network resources - after a request/response cycle is complete. - """ - - -class AsyncByteStream: - async def __aiter__(self) -> AsyncIterator[bytes]: - raise NotImplementedError( - "The '__aiter__' method must be implemented." - ) # pragma: no cover - yield b"" # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/venv/lib/python3.12/site-packages/httpx/_urlparse.py b/venv/lib/python3.12/site-packages/httpx/_urlparse.py deleted file mode 100644 index bf190fd5..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_urlparse.py +++ /dev/null @@ -1,527 +0,0 @@ -""" -An implementation of `urlparse` that provides URL validation and normalization -as described by RFC3986. - -We rely on this implementation rather than the one in Python's stdlib, because: - -* It provides more complete URL validation. -* It properly differentiates between an empty querystring and an absent querystring, - to distinguish URLs with a trailing '?'. -* It handles scheme, hostname, port, and path normalization. -* It supports IDNA hostnames, normalizing them to their encoded form. 
-* The API supports passing individual components, as well as the complete URL string.
-
-Previously we relied on the excellent `rfc3986` package to handle URL parsing and
-validation, but this module provides a simpler alternative, with less indirection
-required.
-"""
-
-from __future__ import annotations
-
-import ipaddress
-import re
-import typing
-
-import idna
-
-from ._exceptions import InvalidURL
-
-MAX_URL_LENGTH = 65536
-
-# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
-UNRESERVED_CHARACTERS = (
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
-)
-SUB_DELIMS = "!$&'()*+,;="
-
-PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")
-
-# https://url.spec.whatwg.org/#percent-encoded-bytes
-
-# The fragment percent-encode set is the C0 control percent-encode set
-# and U+0020 SPACE, U+0022 ("), U+003C (<), U+003E (>), and U+0060 (`).
-FRAG_SAFE = "".join(
-    [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x3C, 0x3E, 0x60)]
-)
-
-# The query percent-encode set is the C0 control percent-encode set
-# and U+0020 SPACE, U+0022 ("), U+0023 (#), U+003C (<), and U+003E (>).
-QUERY_SAFE = "".join(
-    [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E)]
-)
-
-# The path percent-encode set is the query percent-encode set
-# and U+003F (?), U+0060 (`), U+007B ({), and U+007D (}).
-PATH_SAFE = "".join(
-    [
-        chr(i)
-        for i in range(0x20, 0x7F)
-        if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + (0x3F, 0x60, 0x7B, 0x7D)
-    ]
-)
-
-# The userinfo percent-encode set is the path percent-encode set
-# and U+002F (/), U+003A (:), U+003B (;), U+003D (=), U+0040 (@),
-# U+005B ([) to U+005E (^), inclusive, and U+007C (|).
-USERNAME_SAFE = "".join(
-    [
-        chr(i)
-        for i in range(0x20, 0x7F)
-        if i
-        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
-        + (0x3F, 0x60, 0x7B, 0x7D)
-        + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
-    ]
-)
-PASSWORD_SAFE = "".join(
-    [
-        chr(i)
-        for i in range(0x20, 0x7F)
-        if i
-        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
-        + (0x3F, 0x60, 0x7B, 0x7D)
-        + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
-    ]
-)
-# Note... The terminology 'userinfo' percent-encode set in the WHATWG document
-# is used for the username and password quoting. For the joint userinfo component
-# we remove U+003A (:) from the safe set.
-USERINFO_SAFE = "".join(
-    [
-        chr(i)
-        for i in range(0x20, 0x7F)
-        if i
-        not in (0x20, 0x22, 0x23, 0x3C, 0x3E)
-        + (0x3F, 0x60, 0x7B, 0x7D)
-        + (0x2F, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C)
-    ]
-)
-
-
-# {scheme}:      (optional)
-# //{authority}  (optional)
-# {path}
-# ?{query}       (optional)
-# #{fragment}    (optional)
-URL_REGEX = re.compile(
-    (
-        r"(?:(?P<scheme>{scheme}):)?"
-        r"(?://(?P<authority>{authority}))?"
-        r"(?P<path>{path})"
-        r"(?:\?(?P<query>{query}))?"
-        r"(?:#(?P<fragment>{fragment}))?"
-    ).format(
-        scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?",
-        authority="[^/?#]*",
-        path="[^?#]*",
-        query="[^#]*",
-        fragment=".*",
-    )
-)
-
-# {userinfo}@  (optional)
-# {host}
-# :{port}      (optional)
-AUTHORITY_REGEX = re.compile(
-    (
-        r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?"
-    ).format(
-        userinfo=".*",  # Any character sequence.
-        host="(\\[.*\\]|[^:@]*)",  # Either any character sequence excluding ':' or '@',
-        # or an IPv6 address enclosed within square brackets.
-        port=".*",  # Any character sequence.
-    )
-)
-
-
-# If we call urlparse with an individual component, then we need to regex
-# validate that component individually.
-# Note that we're duplicating the same strings as above. Shock! Horror!!
-COMPONENT_REGEX = {
-    "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"),
-    "authority": re.compile("[^/?#]*"),
-    "path": re.compile("[^?#]*"),
-    "query": re.compile("[^#]*"),
-    "fragment": re.compile(".*"),
-    "userinfo": re.compile("[^@]*"),
-    "host": re.compile("(\\[.*\\]|[^:]*)"),
-    "port": re.compile(".*"),
-}
-
-
-# We use these simple regexes as a first pass before handing off to
-# the stdlib 'ipaddress' module for IP address validation.
-IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
-IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$")
-
-
-class ParseResult(typing.NamedTuple):
-    scheme: str
-    userinfo: str
-    host: str
-    port: int | None
-    path: str
-    query: str | None
-    fragment: str | None
-
-    @property
-    def authority(self) -> str:
-        return "".join(
-            [
-                f"{self.userinfo}@" if self.userinfo else "",
-                f"[{self.host}]" if ":" in self.host else self.host,
-                f":{self.port}" if self.port is not None else "",
-            ]
-        )
-
-    @property
-    def netloc(self) -> str:
-        return "".join(
-            [
-                f"[{self.host}]" if ":" in self.host else self.host,
-                f":{self.port}" if self.port is not None else "",
-            ]
-        )
-
-    def copy_with(self, **kwargs: str | None) -> ParseResult:
-        if not kwargs:
-            return self
-
-        defaults = {
-            "scheme": self.scheme,
-            "authority": self.authority,
-            "path": self.path,
-            "query": self.query,
-            "fragment": self.fragment,
-        }
-        defaults.update(kwargs)
-        return urlparse("", **defaults)
-
-    def __str__(self) -> str:
-        authority = self.authority
-        return "".join(
-            [
-                f"{self.scheme}:" if self.scheme else "",
-                f"//{authority}" if authority else "",
-                self.path,
-                f"?{self.query}" if self.query is not None else "",
-                f"#{self.fragment}" if self.fragment is not None else "",
-            ]
-        )
-
-
-def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
-    # Initial basic checks on allowable URLs.
-    # ---------------------------------------
-
-    # Hard limit the maximum allowable URL length.
-    if len(url) > MAX_URL_LENGTH:
-        raise InvalidURL("URL too long")
-
-    # If a URL includes any ASCII control characters including \t, \r, \n,
-    # then treat it as invalid.
-    if any(char.isascii() and not char.isprintable() for char in url):
-        char = next(char for char in url if char.isascii() and not char.isprintable())
-        idx = url.find(char)
-        error = (
-            f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}."
-        )
-        raise InvalidURL(error)
-
-    # Some keyword arguments require special handling.
-    # ------------------------------------------------
-
-    # Coerce "port" to a string, if it is provided as an integer.
-    if "port" in kwargs:
-        port = kwargs["port"]
-        kwargs["port"] = str(port) if isinstance(port, int) else port
-
-    # Replace "netloc" with "host" and "port".
-    if "netloc" in kwargs:
-        netloc = kwargs.pop("netloc") or ""
-        kwargs["host"], _, kwargs["port"] = netloc.partition(":")
-
-    # Replace "username" and/or "password" with "userinfo".
-    if "username" in kwargs or "password" in kwargs:
-        username = quote(kwargs.pop("username", "") or "", safe=USERNAME_SAFE)
-        password = quote(kwargs.pop("password", "") or "", safe=PASSWORD_SAFE)
-        kwargs["userinfo"] = f"{username}:{password}" if password else username
-
-    # Replace "raw_path" with "path" and "query".
-    if "raw_path" in kwargs:
-        raw_path = kwargs.pop("raw_path") or ""
-        kwargs["path"], separator, kwargs["query"] = raw_path.partition("?")
-        if not separator:
-            kwargs["query"] = None
-
-    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
- if "host" in kwargs: - host = kwargs.get("host") or "" - if ":" in host and not (host.startswith("[") and host.endswith("]")): - kwargs["host"] = f"[{host}]" - - # If any keyword arguments are provided, ensure they are valid. - # ------------------------------------------------------------- - - for key, value in kwargs.items(): - if value is not None: - if len(value) > MAX_URL_LENGTH: - raise InvalidURL(f"URL component '{key}' too long") - - # If a component includes any ASCII control characters including \t, \r, \n, - # then treat it as invalid. - if any(char.isascii() and not char.isprintable() for char in value): - char = next( - char for char in value if char.isascii() and not char.isprintable() - ) - idx = value.find(char) - error = ( - f"Invalid non-printable ASCII character in URL {key} component, " - f"{char!r} at position {idx}." - ) - raise InvalidURL(error) - - # Ensure that keyword arguments match as a valid regex. - if not COMPONENT_REGEX[key].fullmatch(value): - raise InvalidURL(f"Invalid URL component '{key}'") - - # The URL_REGEX will always match, but may have empty components. - url_match = URL_REGEX.match(url) - assert url_match is not None - url_dict = url_match.groupdict() - - # * 'scheme', 'authority', and 'path' may be empty strings. - # * 'query' may be 'None', indicating no trailing "?" portion. - # Any string including the empty string, indicates a trailing "?". - # * 'fragment' may be 'None', indicating no trailing "#" portion. - # Any string including the empty string, indicates a trailing "#". - scheme = kwargs.get("scheme", url_dict["scheme"]) or "" - authority = kwargs.get("authority", url_dict["authority"]) or "" - path = kwargs.get("path", url_dict["path"]) or "" - query = kwargs.get("query", url_dict["query"]) - frag = kwargs.get("fragment", url_dict["fragment"]) - - # The AUTHORITY_REGEX will always match, but may have empty components. - authority_match = AUTHORITY_REGEX.match(authority) - assert authority_match is not None - authority_dict = authority_match.groupdict() - - # * 'userinfo' and 'host' may be empty strings. - # * 'port' may be 'None'. - userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" - host = kwargs.get("host", authority_dict["host"]) or "" - port = kwargs.get("port", authority_dict["port"]) - - # Normalize and validate each component. - # We end up with a parsed representation of the URL, - # with components that are plain ASCII bytestrings. - parsed_scheme: str = scheme.lower() - parsed_userinfo: str = quote(userinfo, safe=USERINFO_SAFE) - parsed_host: str = encode_host(host) - parsed_port: int | None = normalize_port(port, scheme) - - has_scheme = parsed_scheme != "" - has_authority = ( - parsed_userinfo != "" or parsed_host != "" or parsed_port is not None - ) - validate_path(path, has_scheme=has_scheme, has_authority=has_authority) - if has_scheme or has_authority: - path = normalize_path(path) - - parsed_path: str = quote(path, safe=PATH_SAFE) - parsed_query: str | None = None if query is None else quote(query, safe=QUERY_SAFE) - parsed_frag: str | None = None if frag is None else quote(frag, safe=FRAG_SAFE) - - # The parsed ASCII bytestrings are our canonical form. - # All properties of the URL are derived from these. 
- return ParseResult( - parsed_scheme, - parsed_userinfo, - parsed_host, - parsed_port, - parsed_path, - parsed_query, - parsed_frag, - ) - - -def encode_host(host: str) -> str: - if not host: - return "" - - elif IPv4_STYLE_HOSTNAME.match(host): - # Validate IPv4 hostnames like #.#.#.# - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet - try: - ipaddress.IPv4Address(host) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv4 address: {host!r}") - return host - - elif IPv6_STYLE_HOSTNAME.match(host): - # Validate IPv6 hostnames like [...] - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # "A host identified by an Internet Protocol literal address, version 6 - # [RFC3513] or later, is distinguished by enclosing the IP literal - # within square brackets ("[" and "]"). This is the only place where - # square bracket characters are allowed in the URI syntax." - try: - ipaddress.IPv6Address(host[1:-1]) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv6 address: {host!r}") - return host[1:-1] - - elif host.isascii(): - # Regular ASCII hostnames - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # reg-name = *( unreserved / pct-encoded / sub-delims ) - WHATWG_SAFE = '"`{}%|\\' - return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE) - - # IDNA hostnames - try: - return idna.encode(host.lower()).decode("ascii") - except idna.IDNAError: - raise InvalidURL(f"Invalid IDNA hostname: {host!r}") - - -def normalize_port(port: str | int | None, scheme: str) -> int | None: - # From https://tools.ietf.org/html/rfc3986#section-3.2.3 - # - # "A scheme may define a default port. For example, the "http" scheme - # defines a default port of "80", corresponding to its reserved TCP - # port number. The type of port designated by the port number (e.g., - # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and - # normalizers should omit the port component and its ":" delimiter if - # port is empty or if its value would be the same as that of the - # scheme's default." - if port is None or port == "": - return None - - try: - port_as_int = int(port) - except ValueError: - raise InvalidURL(f"Invalid port: {port!r}") - - # See https://url.spec.whatwg.org/#url-miscellaneous - default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( - scheme - ) - if port_as_int == default_port: - return None - return port_as_int - - -def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: - """ - Path validation rules that depend on if the URL contains - a scheme or authority component. - - See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 - """ - if has_authority: - # If a URI contains an authority component, then the path component - # must either be empty or begin with a slash ("/") character." - if path and not path.startswith("/"): - raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") - - if not has_scheme and not has_authority: - # If a URI does not contain an authority component, then the path cannot begin - # with two slash characters ("//"). - if path.startswith("//"): - raise InvalidURL("Relative URLs cannot have a path starting with '//'") - - # In addition, a URI reference (Section 4.1) may be a relative-path reference, - # in which case the first path segment cannot contain a colon (":") character. 
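Taken together, the checks above produce the normalization visible on any `httpx.URL`; a hedged sketch of that behaviour, as described by the surrounding comments:

    import httpx

    # Non-printable ASCII anywhere in the URL is rejected.
    try:
        httpx.URL("https://exam\nple.com/")
    except httpx.InvalidURL as exc:
        print(exc)  # Invalid non-printable ASCII character in URL ...

    url = httpx.URL("HTTPS://MÜLLER.de:443/a/./b/../c")
    assert url.scheme == "https"                  # scheme lowercased
    assert url.raw_host == b"xn--mller-kva.de"    # host lowercased and IDNA-encoded
    assert url.port is None                       # default https port elided
    assert url.path == "/a/c"                     # "." and ".." segments removed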
- if path.startswith(":"): - raise InvalidURL("Relative URLs cannot have a path starting with ':'") - - -def normalize_path(path: str) -> str: - """ - Drop "." and ".." segments from a URL path. - - For example: - - normalize_path("/path/./to/somewhere/..") == "/path/to" - """ - # Fast return when no '.' characters in the path. - if "." not in path: - return path - - components = path.split("/") - - # Fast return when no '.' or '..' components in the path. - if "." not in components and ".." not in components: - return path - - # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 - output: list[str] = [] - for component in components: - if component == ".": - pass - elif component == "..": - if output and output != [""]: - output.pop() - else: - output.append(component) - return "/".join(output) - - -def PERCENT(string: str) -> str: - return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")]) - - -def percent_encoded(string: str, safe: str) -> str: - """ - Use percent-encoding to quote a string. - """ - NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe - - # Fast path for strings that don't need escaping. - if not string.rstrip(NON_ESCAPED_CHARS): - return string - - return "".join( - [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string] - ) - - -def quote(string: str, safe: str) -> str: - """ - Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. - - See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 - - * `string`: The string to be percent-escaped. - * `safe`: A string containing characters that may be treated as safe, and do not - need to be escaped. Unreserved characters are always treated as safe. - See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 - """ - parts = [] - current_position = 0 - for match in re.finditer(PERCENT_ENCODED_REGEX, string): - start_position, end_position = match.start(), match.end() - matched_text = match.group(0) - # Add any text up to the '%xx' escape sequence. - if start_position != current_position: - leading_text = string[current_position:start_position] - parts.append(percent_encoded(leading_text, safe=safe)) - - # Add the '%xx' escape sequence. - parts.append(matched_text) - current_position = end_position - - # Add any text after the final '%xx' escape sequence. 
- if current_position != len(string): - trailing_text = string[current_position:] - parts.append(percent_encoded(trailing_text, safe=safe)) - - return "".join(parts) diff --git a/venv/lib/python3.12/site-packages/httpx/_urls.py b/venv/lib/python3.12/site-packages/httpx/_urls.py deleted file mode 100644 index 147a8fa3..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_urls.py +++ /dev/null @@ -1,641 +0,0 @@ -from __future__ import annotations - -import typing -from urllib.parse import parse_qs, unquote, urlencode - -import idna - -from ._types import QueryParamTypes -from ._urlparse import urlparse -from ._utils import primitive_value_to_str - -__all__ = ["URL", "QueryParams"] - - -class URL: - """ - url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") - - assert url.scheme == "https" - assert url.username == "jo@email.com" - assert url.password == "a secret" - assert url.userinfo == b"jo%40email.com:a%20secret" - assert url.host == "müller.de" - assert url.raw_host == b"xn--mller-kva.de" - assert url.port == 1234 - assert url.netloc == b"xn--mller-kva.de:1234" - assert url.path == "/pa th" - assert url.query == b"?search=ab" - assert url.raw_path == b"/pa%20th?search=ab" - assert url.fragment == "anchorlink" - - The components of a URL are broken down like this: - - https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink - [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] - [ userinfo ] [ netloc ][ raw_path ] - - Note that: - - * `url.scheme` is normalized to always be lowercased. - - * `url.host` is normalized to always be lowercased. Internationalized domain - names are represented in unicode, without IDNA encoding applied. For instance: - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - * `url.port` is either None or an integer. URLs that include the default port for - "http", "https", "ws", "wss", and "ftp" schemes have their port - normalized to `None`. - - assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") - assert httpx.URL("http://example.com").port is None - assert httpx.URL("http://example.com:80").port is None - - * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work - with `url.username` and `url.password` instead, which handle the URL escaping. - - * `url.raw_path` is raw bytes of both the path and query, without URL escaping. - This portion is used as the target when constructing HTTP requests. Usually you'll - want to work with `url.path` instead. - - * `url.query` is raw bytes, without URL escaping. A URL query string portion can - only be properly URL escaped when decoding the parameter names and values - themselves. - """ - - def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: - if kwargs: - allowed = { - "scheme": str, - "username": str, - "password": str, - "userinfo": bytes, - "host": str, - "port": int, - "netloc": bytes, - "path": str, - "query": bytes, - "raw_path": bytes, - "fragment": str, - "params": object, - } - - # Perform type checking for all supported keyword arguments. 
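The properties documented in the class docstring above can be exercised end-to-end; a brief sketch of the round-trip semantics (the URLs are placeholders):

    import httpx

    url = httpx.URL("https://example.com/pa%20th", params={"q": "idna", "page": 2})
    assert str(url) == "https://example.com/pa%20th?q=idna&page=2"
    assert url.path == "/pa th"                          # URL-decoded
    assert url.query == b"q=idna&page=2"                 # raw bytes
    assert url.raw_path == b"/pa%20th?q=idna&page=2"     # the HTTP request target

    # Copy-style modifiers return new URL instances.
    assert str(url.copy_set_param("page", 3)) == "https://example.com/pa%20th?q=idna&page=3"
    assert str(url.join("/other")) == "https://example.com/other"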
- for key, value in kwargs.items(): - if key not in allowed: - message = f"{key!r} is an invalid keyword argument for URL()" - raise TypeError(message) - if value is not None and not isinstance(value, allowed[key]): - expected = allowed[key].__name__ - seen = type(value).__name__ - message = f"Argument {key!r} must be {expected} but got {seen}" - raise TypeError(message) - if isinstance(value, bytes): - kwargs[key] = value.decode("ascii") - - if "params" in kwargs: - # Replace any "params" keyword with the raw "query" instead. - # - # Ensure that empty params use `kwargs["query"] = None` rather - # than `kwargs["query"] = ""`, so that generated URLs do not - # include an empty trailing "?". - params = kwargs.pop("params") - kwargs["query"] = None if not params else str(QueryParams(params)) - - if isinstance(url, str): - self._uri_reference = urlparse(url, **kwargs) - elif isinstance(url, URL): - self._uri_reference = url._uri_reference.copy_with(**kwargs) - else: - raise TypeError( - "Invalid type for url. Expected str or httpx.URL," - f" got {type(url)}: {url!r}" - ) - - @property - def scheme(self) -> str: - """ - The URL scheme, such as "http", "https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme - - @property - def raw_scheme(self) -> bytes: - """ - The raw bytes representation of the URL scheme, such as b"http", b"https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme.encode("ascii") - - @property - def userinfo(self) -> bytes: - """ - The URL userinfo as a raw bytestring. - For example: b"jo%40email.com:a%20secret". - """ - return self._uri_reference.userinfo.encode("ascii") - - @property - def username(self) -> str: - """ - The URL username as a string, with URL decoding applied. - For example: "jo@email.com" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[0]) - - @property - def password(self) -> str: - """ - The URL password as a string, with URL decoding applied. - For example: "a secret" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[2]) - - @property - def host(self) -> str: - """ - The URL host as a string. - Always normalized to lowercase, with IDNA hosts decoded into unicode. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.host == "www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.host == "::ffff:192.168.0.1" - """ - host: str = self._uri_reference.host - - if host.startswith("xn--"): - host = idna.decode(host) - - return host - - @property - def raw_host(self) -> bytes: - """ - The raw bytes representation of the URL host. - Always normalized to lowercase, and IDNA encoded. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.raw_host == b"www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.raw_host == b"::ffff:192.168.0.1" - """ - return self._uri_reference.host.encode("ascii") - - @property - def port(self) -> int | None: - """ - The URL port as an integer. - - Note that the URL class performs port normalization as per the WHATWG spec. 
-        Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
-        treated as `None`.
-
-        For example:
-
-        assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
-        assert httpx.URL("http://www.example.com:80").port is None
-        """
-        return self._uri_reference.port
-
-    @property
-    def netloc(self) -> bytes:
-        """
-        Either `<host>` or `<host>:<port>` as bytes.
-        Always normalized to lowercase, and IDNA encoded.
-
-        This property may be used for generating the value of a request
-        "Host" header.
-        """
-        return self._uri_reference.netloc.encode("ascii")
-
-    @property
-    def path(self) -> str:
-        """
-        The URL path as a string. Excluding the query string, and URL decoded.
-
-        For example:
-
-        url = httpx.URL("https://example.com/pa%20th")
-        assert url.path == "/pa th"
-        """
-        path = self._uri_reference.path or "/"
-        return unquote(path)
-
-    @property
-    def query(self) -> bytes:
-        """
-        The URL query string, as raw bytes, excluding the leading b"?".
-
-        This is necessarily a bytewise interface, because we cannot
-        perform URL decoding of this representation until we've parsed
-        the keys and values into a QueryParams instance.
-
-        For example:
-
-        url = httpx.URL("https://example.com/?filter=some%20search%20terms")
-        assert url.query == b"filter=some%20search%20terms"
-        """
-        query = self._uri_reference.query or ""
-        return query.encode("ascii")
-
-    @property
-    def params(self) -> QueryParams:
-        """
-        The URL query parameters, neatly parsed and packaged into an immutable
-        multidict representation.
-        """
-        return QueryParams(self._uri_reference.query)
-
-    @property
-    def raw_path(self) -> bytes:
-        """
-        The complete URL path and query string as raw bytes.
-        Used as the target when constructing HTTP requests.
-
-        For example:
-
-        GET /users?search=some%20text HTTP/1.1
-        Host: www.example.org
-        Connection: close
-        """
-        path = self._uri_reference.path or "/"
-        if self._uri_reference.query is not None:
-            path += "?" + self._uri_reference.query
-        return path.encode("ascii")
-
-    @property
-    def fragment(self) -> str:
-        """
-        The URL fragment, as used in HTML anchors.
-        As a string, without the leading '#'.
-        """
-        return unquote(self._uri_reference.fragment or "")
-
-    @property
-    def is_absolute_url(self) -> bool:
-        """
-        Return `True` for absolute URLs such as 'http://example.com/path',
-        and `False` for relative URLs such as '/path'.
-        """
-        # We don't use `.is_absolute` from `rfc3986` because it treats
-        # URLs with a fragment portion as not absolute.
-        # What we actually care about is if the URL provides
-        # a scheme and hostname to which connections should be made.
-        return bool(self._uri_reference.scheme and self._uri_reference.host)
-
-    @property
-    def is_relative_url(self) -> bool:
-        """
-        Return `False` for absolute URLs such as 'http://example.com/path',
-        and `True` for relative URLs such as '/path'.
-        """
-        return not self.is_absolute_url
-
-    def copy_with(self, **kwargs: typing.Any) -> URL:
-        """
-        Copy this URL, returning a new URL with some components altered.
-        Accepts the same set of parameters as the components that are made
-        available via properties on the `URL` class.
- - For example: - - url = httpx.URL("https://www.example.com").copy_with( - username="jo@gmail.com", password="a secret" - ) - assert url == "https://jo%40email.com:a%20secret@www.example.com" - """ - return URL(self, **kwargs) - - def copy_set_param(self, key: str, value: typing.Any = None) -> URL: - return self.copy_with(params=self.params.set(key, value)) - - def copy_add_param(self, key: str, value: typing.Any = None) -> URL: - return self.copy_with(params=self.params.add(key, value)) - - def copy_remove_param(self, key: str) -> URL: - return self.copy_with(params=self.params.remove(key)) - - def copy_merge_params(self, params: QueryParamTypes) -> URL: - return self.copy_with(params=self.params.merge(params)) - - def join(self, url: URL | str) -> URL: - """ - Return an absolute URL, using this URL as the base. - - Eg. - - url = httpx.URL("https://www.example.com/test") - url = url.join("/new/path") - assert url == "https://www.example.com/new/path" - """ - from urllib.parse import urljoin - - return URL(urljoin(str(self), str(URL(url)))) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, (URL, str)) and str(self) == str(URL(other)) - - def __str__(self) -> str: - return str(self._uri_reference) - - def __repr__(self) -> str: - scheme, userinfo, host, port, path, query, fragment = self._uri_reference - - if ":" in userinfo: - # Mask any password component. - userinfo = f'{userinfo.split(":")[0]}:[secure]' - - authority = "".join( - [ - f"{userinfo}@" if userinfo else "", - f"[{host}]" if ":" in host else host, - f":{port}" if port is not None else "", - ] - ) - url = "".join( - [ - f"{self.scheme}:" if scheme else "", - f"//{authority}" if authority else "", - path, - f"?{query}" if query is not None else "", - f"#{fragment}" if fragment is not None else "", - ] - ) - - return f"{self.__class__.__name__}({url!r})" - - @property - def raw(self) -> tuple[bytes, bytes, int, bytes]: # pragma: nocover - import collections - import warnings - - warnings.warn("URL.raw is deprecated.") - RawURL = collections.namedtuple( - "RawURL", ["raw_scheme", "raw_host", "port", "raw_path"] - ) - return RawURL( - raw_scheme=self.raw_scheme, - raw_host=self.raw_host, - port=self.port, - raw_path=self.raw_path, - ) - - -class QueryParams(typing.Mapping[str, str]): - """ - URL query parameters, as a multi-dict. - """ - - def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: - assert len(args) < 2, "Too many arguments." - assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
- - value = args[0] if args else kwargs - - if value is None or isinstance(value, (str, bytes)): - value = value.decode("ascii") if isinstance(value, bytes) else value - self._dict = parse_qs(value, keep_blank_values=True) - elif isinstance(value, QueryParams): - self._dict = {k: list(v) for k, v in value._dict.items()} - else: - dict_value: dict[typing.Any, list[typing.Any]] = {} - if isinstance(value, (list, tuple)): - # Convert list inputs like: - # [("a", "123"), ("a", "456"), ("b", "789")] - # To a dict representation, like: - # {"a": ["123", "456"], "b": ["789"]} - for item in value: - dict_value.setdefault(item[0], []).append(item[1]) - else: - # Convert dict inputs like: - # {"a": "123", "b": ["456", "789"]} - # To dict inputs where values are always lists, like: - # {"a": ["123"], "b": ["456", "789"]} - dict_value = { - k: list(v) if isinstance(v, (list, tuple)) else [v] - for k, v in value.items() - } - - # Ensure that keys and values are neatly coerced to strings. - # We coerce values `True` and `False` to JSON-like "true" and "false" - # representations, and coerce `None` values to the empty string. - self._dict = { - str(k): [primitive_value_to_str(item) for item in v] - for k, v in dict_value.items() - } - - def keys(self) -> typing.KeysView[str]: - """ - Return all the keys in the query params. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.keys()) == ["a", "b"] - """ - return self._dict.keys() - - def values(self) -> typing.ValuesView[str]: - """ - Return all the values in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.values()) == ["123", "789"] - """ - return {k: v[0] for k, v in self._dict.items()}.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return all items in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.items()) == [("a", "123"), ("b", "789")] - """ - return {k: v[0] for k, v in self._dict.items()}.items() - - def multi_items(self) -> list[tuple[str, str]]: - """ - Return all items in the query params. Allow duplicate keys to occur. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] - """ - multi_items: list[tuple[str, str]] = [] - for k, v in self._dict.items(): - multi_items.extend([(k, i) for i in v]) - return multi_items - - def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: - """ - Get a value from the query param for a given key. If the key occurs - more than once, then only the first value is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get("a") == "123" - """ - if key in self._dict: - return self._dict[str(key)][0] - return default - - def get_list(self, key: str) -> list[str]: - """ - Get all values from the query param for a given key. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get_list("a") == ["123", "456"] - """ - return list(self._dict.get(str(key), [])) - - def set(self, key: str, value: typing.Any = None) -> QueryParams: - """ - Return a new QueryParams instance, setting the value of a key. 
- - Usage: - - q = httpx.QueryParams("a=123") - q = q.set("a", "456") - assert q == httpx.QueryParams("a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = [primitive_value_to_str(value)] - return q - - def add(self, key: str, value: typing.Any = None) -> QueryParams: - """ - Return a new QueryParams instance, setting or appending the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.add("a", "456") - assert q == httpx.QueryParams("a=123&a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] - return q - - def remove(self, key: str) -> QueryParams: - """ - Return a new QueryParams instance, removing the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.remove("a") - assert q == httpx.QueryParams("") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict.pop(str(key), None) - return q - - def merge(self, params: QueryParamTypes | None = None) -> QueryParams: - """ - Return a new QueryParams instance, updated with. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.merge({"b": "456"}) - assert q == httpx.QueryParams("a=123&b=456") - - q = httpx.QueryParams("a=123") - q = q.merge({"a": "456", "b": "789"}) - assert q == httpx.QueryParams("a=456&b=789") - """ - q = QueryParams(params) - q._dict = {**self._dict, **q._dict} - return q - - def __getitem__(self, key: typing.Any) -> str: - return self._dict[key][0] - - def __contains__(self, key: typing.Any) -> bool: - return key in self._dict - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._dict) - - def __bool__(self) -> bool: - return bool(self._dict) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - if not isinstance(other, self.__class__): - return False - return sorted(self.multi_items()) == sorted(other.multi_items()) - - def __str__(self) -> str: - return urlencode(self.multi_items()) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - query_string = str(self) - return f"{class_name}({query_string!r})" - - def update(self, params: QueryParamTypes | None = None) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.merge(...)` to create an updated copy." - ) - - def __setitem__(self, key: str, value: str) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.set(key, value)` to create an updated copy." - ) diff --git a/venv/lib/python3.12/site-packages/httpx/_utils.py b/venv/lib/python3.12/site-packages/httpx/_utils.py deleted file mode 100644 index 7fe827da..00000000 --- a/venv/lib/python3.12/site-packages/httpx/_utils.py +++ /dev/null @@ -1,242 +0,0 @@ -from __future__ import annotations - -import ipaddress -import os -import re -import typing -from urllib.request import getproxies - -from ._types import PrimitiveData - -if typing.TYPE_CHECKING: # pragma: no cover - from ._urls import URL - - -def primitive_value_to_str(value: PrimitiveData) -> str: - """ - Coerce a primitive data type into a string value. - - Note that we prefer JSON-style 'true'/'false' for boolean values here. 
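The coercion described here, combined with the immutable multi-dict semantics documented above, can be sketched as:

    import httpx

    q = httpx.QueryParams({"flag": True, "note": None, "page": 2})
    assert str(q) == "flag=true&note=&page=2"   # JSON-style booleans, None -> ""

    q = httpx.QueryParams("a=123&a=456&b=789")
    assert q["a"] == "123"                      # mapping access returns the first value
    assert q.get_list("a") == ["123", "456"]

    # QueryParams is immutable; "mutations" return new instances.
    assert str(q.set("a", "999")) == "a=999&b=789"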
- """ - if value is True: - return "true" - elif value is False: - return "false" - elif value is None: - return "" - return str(value) - - -def get_environment_proxies() -> dict[str, str | None]: - """Gets proxy information from the environment""" - - # urllib.request.getproxies() falls back on System - # Registry and Config for proxies on Windows and macOS. - # We don't want to propagate non-HTTP proxies into - # our configuration such as 'TRAVIS_APT_PROXY'. - proxy_info = getproxies() - mounts: dict[str, str | None] = {} - - for scheme in ("http", "https", "all"): - if proxy_info.get(scheme): - hostname = proxy_info[scheme] - mounts[f"{scheme}://"] = ( - hostname if "://" in hostname else f"http://{hostname}" - ) - - no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] - for hostname in no_proxy_hosts: - # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details - # on how names in `NO_PROXY` are handled. - if hostname == "*": - # If NO_PROXY=* is used or if "*" occurs as any one of the comma - # separated hostnames, then we should just bypass any information - # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore - # proxies. - return {} - elif hostname: - # NO_PROXY=.google.com is marked as "all://*.google.com, - # which disables "www.google.com" but not "google.com" - # NO_PROXY=google.com is marked as "all://*google.com, - # which disables "www.google.com" and "google.com". - # (But not "wwwgoogle.com") - # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" - # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 - if "://" in hostname: - mounts[hostname] = None - elif is_ipv4_hostname(hostname): - mounts[f"all://{hostname}"] = None - elif is_ipv6_hostname(hostname): - mounts[f"all://[{hostname}]"] = None - elif hostname.lower() == "localhost": - mounts[f"all://{hostname}"] = None - else: - mounts[f"all://*{hostname}"] = None - - return mounts - - -def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: - return value.encode(encoding) if isinstance(value, str) else value - - -def to_str(value: str | bytes, encoding: str = "utf-8") -> str: - return value if isinstance(value, str) else value.decode(encoding) - - -def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: - return value if isinstance(match_type_of, str) else value.encode() - - -def unquote(value: str) -> str: - return value[1:-1] if value[0] == value[-1] == '"' else value - - -def peek_filelike_length(stream: typing.Any) -> int | None: - """ - Given a file-like stream object, return its length in number of bytes - without reading it into memory. - """ - try: - # Is it an actual file? - fd = stream.fileno() - # Yup, seems to be an actual file. - length = os.fstat(fd).st_size - except (AttributeError, OSError): - # No... Maybe it's something that supports random access, like `io.BytesIO`? - try: - # Assuming so, go to end of stream to figure out its length, - # then put it back in place. - offset = stream.tell() - length = stream.seek(0, os.SEEK_END) - stream.seek(offset) - except (AttributeError, OSError): - # Not even that? Sorry, we're doomed... - return None - - return length - - -class URLPattern: - """ - A utility class currently used for making lookups against proxy keys... - - # Wildcard matching... - >>> pattern = URLPattern("all://") - >>> pattern.matches(httpx.URL("http://example.com")) - True - - # Witch scheme matching... 
- >>> pattern = URLPattern("https://") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - - # With domain matching... - >>> pattern = URLPattern("https://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # Wildcard scheme, with domain matching... - >>> pattern = URLPattern("all://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - True - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # With port matching... - >>> pattern = URLPattern("https://example.com:1234") - >>> pattern.matches(httpx.URL("https://example.com:1234")) - True - >>> pattern.matches(httpx.URL("https://example.com")) - False - """ - - def __init__(self, pattern: str) -> None: - from ._urls import URL - - if pattern and ":" not in pattern: - raise ValueError( - f"Proxy keys should use proper URL forms rather " - f"than plain scheme strings. " - f'Instead of "{pattern}", use "{pattern}://"' - ) - - url = URL(pattern) - self.pattern = pattern - self.scheme = "" if url.scheme == "all" else url.scheme - self.host = "" if url.host == "*" else url.host - self.port = url.port - if not url.host or url.host == "*": - self.host_regex: typing.Pattern[str] | None = None - elif url.host.startswith("*."): - # *.example.com should match "www.example.com", but not "example.com" - domain = re.escape(url.host[2:]) - self.host_regex = re.compile(f"^.+\\.{domain}$") - elif url.host.startswith("*"): - # *example.com should match "www.example.com" and "example.com" - domain = re.escape(url.host[1:]) - self.host_regex = re.compile(f"^(.+\\.)?{domain}$") - else: - # example.com should match "example.com" but not "www.example.com" - domain = re.escape(url.host) - self.host_regex = re.compile(f"^{domain}$") - - def matches(self, other: URL) -> bool: - if self.scheme and self.scheme != other.scheme: - return False - if ( - self.host - and self.host_regex is not None - and not self.host_regex.match(other.host) - ): - return False - if self.port is not None and self.port != other.port: - return False - return True - - @property - def priority(self) -> tuple[int, int, int]: - """ - The priority allows URLPattern instances to be sortable, so that - we can match from most specific to least specific. - """ - # URLs with a port should take priority over URLs without a port. - port_priority = 0 if self.port is not None else 1 - # Longer hostnames should match first. - host_priority = -len(self.host) - # Longer schemes should match first. 
- scheme_priority = -len(self.scheme) - return (port_priority, host_priority, scheme_priority) - - def __hash__(self) -> int: - return hash(self.pattern) - - def __lt__(self, other: URLPattern) -> bool: - return self.priority < other.priority - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, URLPattern) and self.pattern == other.pattern - - -def is_ipv4_hostname(hostname: str) -> bool: - try: - ipaddress.IPv4Address(hostname.split("/")[0]) - except Exception: - return False - return True - - -def is_ipv6_hostname(hostname: str) -> bool: - try: - ipaddress.IPv6Address(hostname.split("/")[0]) - except Exception: - return False - return True diff --git a/venv/lib/python3.12/site-packages/httpx/py.typed b/venv/lib/python3.12/site-packages/httpx/py.typed deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/idna-3.10.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/LICENSE.md b/venv/lib/python3.12/site-packages/idna-3.10.dist-info/LICENSE.md deleted file mode 100644 index 19b6b452..00000000 --- a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/LICENSE.md +++ /dev/null @@ -1,31 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2013-2024, Kim Davies and contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
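For reference, the environment-proxy parsing and `URLPattern` priority rules deleted above are what drive proxy selection in a default `httpx.Client`; a hedged sketch of the observable behaviour (the proxy URL is a placeholder):

    import os

    os.environ["HTTPS_PROXY"] = "http://proxy.local:3128"      # placeholder proxy
    os.environ["NO_PROXY"] = "localhost,.example.com,192.168.0.1"

    import httpx

    # With trust_env=True (the default), the client builds mounts equivalent to:
    #   "https://":             proxy transport via http://proxy.local:3128
    #   "all://localhost":      None (bypass the proxy)
    #   "all://*.example.com":  None (bypass, subdomains only)
    #   "all://192.168.0.1":    None (bypass)
    # URLPattern priorities then select the most specific match per request.
    client = httpx.Client()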
diff --git a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/METADATA b/venv/lib/python3.12/site-packages/idna-3.10.dist-info/METADATA deleted file mode 100644 index c42623e9..00000000 --- a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/METADATA +++ /dev/null @@ -1,250 +0,0 @@ -Metadata-Version: 2.1 -Name: idna -Version: 3.10 -Summary: Internationalized Domain Names in Applications (IDNA) -Author-email: Kim Davies -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Requires-Dist: ruff >= 0.6.2 ; extra == "all" -Requires-Dist: mypy >= 1.11.2 ; extra == "all" -Requires-Dist: pytest >= 8.3.2 ; extra == "all" -Requires-Dist: flake8 >= 7.1.1 ; extra == "all" -Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst -Project-URL: Issue tracker, https://github.com/kjd/idna/issues -Project-URL: Source, https://github.com/kjd/idna -Provides-Extra: all - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for the Internationalized Domain Names in -Applications (IDNA) protocol as specified in `RFC 5891 -`_. This is the latest version of -the protocol and is sometimes referred to as “IDNA 2008”. - -This library also provides support for Unicode Technical -Standard 46, `Unicode IDNA Compatibility Processing -`_. - -This acts as a suitable replacement for the “encodings.idna” -module that comes with the Python standard library, but which -only supports the older superseded IDNA specification (`RFC 3490 -`_). - -Basic functions are simply executed: - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - -Installation ------------- - -This package is available for installation from PyPI: - -.. code-block:: bash - - $ python3 -m pip install idna - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a -domain name argument and perform a conversion to A-labels or U-labels -respectively. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -You may use the codec encoding and decoding methods using the -``idna.codec`` module: - -.. 
code-block:: pycon - - >>> import idna.codec - >>> print('домен.испытание'.encode('idna2008')) - b'xn--d1acufc.xn--80akhbyknj4f' - >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008')) - домен.испытание - -Conversions can be applied at a per-label basis using the ``ulabel`` or -``alabel`` functions if necessary: - -.. code-block:: pycon - - >>> idna.alabel('测试') - b'xn--0zwm56d' - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -As described in `RFC 5895 `_, the -IDNA specification does not normalize input from different potential -ways a user may input a domain name. This functionality, known as -a “mapping”, is considered by the specification to be a local -user-interface issue distinct from IDNA conversion functionality. - -This library provides one such mapping that was developed by the -Unicode Consortium. Known as `Unicode IDNA Compatibility Processing -`_, it provides for both a regular -mapping for typical applications, as well as a transitional mapping to -help migrate from older IDNA 2003 applications. Strings are -preprocessed according to Section 4.4 “Preprocessing for IDNA2008” -prior to the IDNA operations. - -For example, “Königsgäßchen” is not a permissible label as *LATIN -CAPITAL LETTER K* is not allowed (nor are capital letters in general). -UTS 46 will convert this into lower case prior to applying the IDNA -conversion. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - -Transitional processing provides conversions to help transition from -the older 2003 standard to the current standard. For example, in the -original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was -converted into two *LATIN SMALL LETTER S* (ss), whereas in the current -IDNA specification this conversion is not performed. - -.. code-block:: pycon - - >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) - 'xn--knigsgsschen-lcb0w' - -Implementers should use transitional processing with caution, only in -rare cases where conversion from legacy labels to current labels must be -performed (i.e. IDNA implementations that pre-date 2008). For typical -applications that just need to convert labels, transitional processing -is unlikely to be beneficial and could produce unexpected incompatible -results. - -``encodings.idna`` Compatibility -++++++++++++++++++++++++++++++++ - -Function calls from the Python built-in ``encodings.idna`` module are -mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. -Simply substitute the ``import`` clause in your code to refer to the new -module name. - -Exceptions ----------- - -All errors raised during the conversion following the specification -should raise an exception derived from the ``idna.IDNAError`` base -class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and -right-to-left characters in a label; ``idna.InvalidCodepoint`` when -a specific codepoint is an illegal character in an IDN label (i.e. -INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is -illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ -but the contextual requirements are not satisfied.) 
- -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup -tables for performance. These tables are derived from computing against -eligibility criteria in the respective standards. These tables are -computed using the command-line script ``tools/idna-data``. - -This tool will fetch relevant codepoint data from the Unicode repository -and perform the required calculations to identify eligibility. There are -three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and - ``uts46data.py``, the pre-calculated lookup tables used for IDNA and - UTS 46 conversions. Implementers who wish to track this library against - a different Unicode version may use this tool to manually generate a - different version of the ``idnadata.py`` and ``uts46data.py`` files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix - B.1 of RFC 5892 and the pre-computed tables published by `IANA - `_. - -* ``idna-data U+0061``. Prints debugging output on the various - properties associated with an individual Unicode codepoint (in this - case, U+0061), that are used to assess the IDNA and UTS 46 status of a - codepoint. This is helpful in debugging or analysis. - -The tool accepts a number of arguments, described using ``idna-data --h``. Most notably, the ``--version`` argument allows the specification -of the version of Unicode to be used in computing the table data. For -example, ``idna-data --version 9.0.0 make-libdata`` will generate -library data against Unicode 9.0.0. - - -Additional Notes ----------------- - -* **Packages**. The latest tagged release version is published in the - `Python Package Index `_. - -* **Version support**. This library supports Python 3.6 and higher. - As this library serves as a low-level toolkit for a variety of - applications, many of which strive for broad compatibility with older - Python versions, there is no rush to remove older interpreter support. - Removing support for older versions should be well justified in that the - maintenance burden has become too high. - -* **Python 2**. Python 2 is supported by version 2.x of this library. - Use "idna<3" in your requirements file if you need this library for - a Python 2 application. Be advised that these versions are no longer - actively developed. - -* **Testing**. The library has a test suite based on each rule of the - IDNA specification, as well as tests that are provided as part of the - Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing - `_. - -* **Emoji**. It is an occasional request to support emoji domains in - this library. Encoding of symbols like emoji is expressly prohibited by - the technical standard IDNA 2008 and emoji domains are broadly phased - out across the domain industry due to associated security risks. For - now, applications that need to support these non-compliant labels - may wish to consider trying the encode/decode operation in this library - first, and then falling back to using `encodings.idna`. See `the Github - project `_ for more discussion. 
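A compact smoke test of the conversions documented in the README above (values taken from its examples):

    import idna

    assert idna.encode("ドメイン.テスト") == b"xn--eckwd4c7c.xn--zckzah"
    assert idna.decode("xn--eckwd4c7c.xn--zckzah") == "ドメイン.テスト"
    assert idna.alabel("测试") == b"xn--0zwm56d"

    # UTS 46 preprocessing permits mixed-case input that strict IDNA 2008 rejects.
    assert idna.encode("Königsgäßchen", uts46=True) == b"xn--knigsgchen-b4a3dun"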
- diff --git a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/RECORD b/venv/lib/python3.12/site-packages/idna-3.10.dist-info/RECORD deleted file mode 100644 index 9cfce7f9..00000000 --- a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -idna-3.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-3.10.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 -idna-3.10.dist-info/METADATA,sha256=URR5ZyDfQ1PCEGhkYoojqfi2Ra0tau2--lhwG4XSfjI,10158 -idna-3.10.dist-info/RECORD,, -idna-3.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 -idna/__pycache__/__init__.cpython-312.pyc,, -idna/__pycache__/codec.cpython-312.pyc,, -idna/__pycache__/compat.cpython-312.pyc,, -idna/__pycache__/core.cpython-312.pyc,, -idna/__pycache__/idnadata.cpython-312.pyc,, -idna/__pycache__/intranges.cpython-312.pyc,, -idna/__pycache__/package_data.cpython-312.pyc,, -idna/__pycache__/uts46data.cpython-312.pyc,, -idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 -idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 -idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 -idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 -idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 -idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 -idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 diff --git a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/WHEEL b/venv/lib/python3.12/site-packages/idna-3.10.dist-info/WHEEL deleted file mode 100644 index 3b5e64b5..00000000 --- a/venv/lib/python3.12/site-packages/idna-3.10.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/idna/__init__.py b/venv/lib/python3.12/site-packages/idna/__init__.py deleted file mode 100644 index cfdc030a..00000000 --- a/venv/lib/python3.12/site-packages/idna/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -from .core import ( - IDNABidiError, - IDNAError, - InvalidCodepoint, - InvalidCodepointContext, - alabel, - check_bidi, - check_hyphen_ok, - check_initial_combiner, - check_label, - check_nfc, - decode, - encode, - ulabel, - uts46_remap, - valid_contextj, - valid_contexto, - valid_label_length, - valid_string_length, -) -from .intranges import intranges_contain -from .package_data import __version__ - -__all__ = [ - "__version__", - "IDNABidiError", - "IDNAError", - "InvalidCodepoint", - "InvalidCodepointContext", - "alabel", - "check_bidi", - "check_hyphen_ok", - "check_initial_combiner", - "check_label", - "check_nfc", - "decode", - "encode", - "intranges_contain", - "ulabel", - "uts46_remap", - "valid_contextj", - "valid_contexto", - "valid_label_length", - "valid_string_length", -] diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 0c1a41ab..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc deleted file mode 100644 index d917a8ec..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc deleted file mode 100644 index 438a26c1..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc deleted file mode 100644 index a798a5b2..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc deleted file mode 100644 index 5856ce67..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/idnadata.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc deleted file mode 100644 index f82e78a0..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc deleted file mode 100644 index 53c20fcd..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc deleted file mode 100644 index 15f99264..00000000 Binary files a/venv/lib/python3.12/site-packages/idna/__pycache__/uts46data.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/idna/codec.py b/venv/lib/python3.12/site-packages/idna/codec.py deleted file mode 100644 index 913abfd6..00000000 --- a/venv/lib/python3.12/site-packages/idna/codec.py +++ /dev/null @@ -1,122 +0,0 @@ -import codecs -import re -from typing import Any, Optional, Tuple - -from .core import IDNAError, alabel, decode, encode, ulabel - -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class Codec(codecs.Codec): - def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - return encode(data), len(data) - - def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return "", 0 - - return decode(data), len(data) - - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - labels = _unicode_dots_re.split(data) - trailing_dot = b"" - if labels: - if not labels[-1]: - trailing_dot = b"." 
- del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = b"." - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result_bytes = b".".join(result) + trailing_dot - size += len(trailing_dot) - return result_bytes, size - - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return ("", 0) - - if not isinstance(data, str): - data = str(data, "ascii") - - labels = _unicode_dots_re.split(data) - trailing_dot = "" - if labels: - if not labels[-1]: - trailing_dot = "." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = "." - - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result_str = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result_str, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - - -class StreamReader(Codec, codecs.StreamReader): - pass - - -def search_function(name: str) -> Optional[codecs.CodecInfo]: - if name != "idna2008": - return None - return codecs.CodecInfo( - name=name, - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) - - -codecs.register(search_function) diff --git a/venv/lib/python3.12/site-packages/idna/compat.py b/venv/lib/python3.12/site-packages/idna/compat.py deleted file mode 100644 index 1df9f2a7..00000000 --- a/venv/lib/python3.12/site-packages/idna/compat.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, Union - -from .core import decode, encode - - -def ToASCII(label: str) -> bytes: - return encode(label) - - -def ToUnicode(label: Union[bytes, bytearray]) -> str: - return decode(label) - - -def nameprep(s: Any) -> None: - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.12/site-packages/idna/core.py b/venv/lib/python3.12/site-packages/idna/core.py deleted file mode 100644 index 9115f123..00000000 --- a/venv/lib/python3.12/site-packages/idna/core.py +++ /dev/null @@ -1,437 +0,0 @@ -import bisect -import re -import unicodedata -from typing import Optional, Union - -from . 
import idnadata -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b"xn--" -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class IDNAError(UnicodeError): - """Base exception for all IDNA-encoding related problems""" - - pass - - -class IDNABidiError(IDNAError): - """Exception when bidirectional requirements are not satisfied""" - - pass - - -class InvalidCodepoint(IDNAError): - """Exception when a disallowed or unallocated codepoint is used""" - - pass - - -class InvalidCodepointContext(IDNAError): - """Exception when the codepoint is not valid in the context it is used""" - - pass - - -def _combining_class(cp: int) -> int: - v = unicodedata.combining(chr(cp)) - if v == 0: - if not unicodedata.name(chr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - - -def _is_script(cp: str, script: str) -> bool: - return intranges_contain(ord(cp), idnadata.scripts[script]) - - -def _punycode(s: str) -> bytes: - return s.encode("punycode") - - -def _unot(s: int) -> str: - return "U+{:04X}".format(s) - - -def valid_label_length(label: Union[bytes, str]) -> bool: - if len(label) > 63: - return False - return True - - -def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label: str, check_ltr: bool = False) -> bool: - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == "": - # String likely comes from a newer version of Unicode - raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) - if direction in ["R", "AL", "AN"]: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ["R", "AL"]: - rtl = True - elif direction == "L": - rtl = False - else: - raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) - - valid_ending = False - number_type: Optional[str] = None - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if direction not in [ - "R", - "AL", - "AN", - "EN", - "ES", - "CS", - "ET", - "ON", - "BN", - "NSM", - ]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) - # Bidi rule 3 - if direction in ["R", "AL", "EN", "AN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - # Bidi rule 4 - if direction in ["AN", "EN"]: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError("Can not mix numeral types in a right-to-left label") - else: - # Bidi rule 5 - if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) - # Bidi rule 6 - if direction in ["L", "EN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - - if not valid_ending: - raise IDNABidiError("Label ends with illegal codepoint directionality") - - return True - - -def check_initial_combiner(label: str) -> bool: - if unicodedata.category(label[0])[0] == "M": - raise IDNAError("Label begins with an illegal combining character") - return True - - -def check_hyphen_ok(label: str) -> bool: 
- if label[2:4] == "--": - raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") - if label[0] == "-" or label[-1] == "-": - raise IDNAError("Label must not start or end with a hyphen") - return True - - -def check_nfc(label: str) -> None: - if unicodedata.normalize("NFC", label) != label: - raise IDNAError("Label must be in Normalization Form C") - - -def valid_contextj(label: str, pos: int) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x200C: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos - 1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("L"), ord("D")]: - ok = True - break - else: - break - - if not ok: - return False - - ok = False - for i in range(pos + 1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("R"), ord("D")]: - ok = True - break - else: - break - return ok - - if cp_value == 0x200D: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - return False - - -def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x00B7: - if 0 < pos < len(label) - 1: - if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label) - 1 and len(label) > 1: - return _is_script(label[pos + 1], "Greek") - return False - - elif cp_value == 0x05F3 or cp_value == 0x05F4: - if pos > 0: - return _is_script(label[pos - 1], "Hebrew") - return False - - elif cp_value == 0x30FB: - for cp in label: - if cp == "\u30fb": - continue - if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6F0 <= ord(cp) <= 0x06F9: - return False - return True - - elif 0x6F0 <= cp_value <= 0x6F9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - return False - - -def check_label(label: Union[str, bytes, bytearray]) -> None: - if isinstance(label, (bytes, bytearray)): - label = label.decode("utf-8") - if len(label) == 0: - raise IDNAError("Empty Label") - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for pos, cp in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext( - "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - except ValueError: - raise IDNAError( - "Unknown codepoint adjacent to joiner {} at position {} in {}".format( - _unot(cp_value), pos + 1, repr(label) - ) - ) - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): - if not valid_contexto(label, pos): - raise InvalidCodepointContext( - "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - else: - raise InvalidCodepoint( - "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) - ) - - check_bidi(label) - - -def alabel(label: str) -> bytes: - try: - label_bytes = label.encode("ascii") - 
ulabel(label_bytes) - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - return label_bytes - except UnicodeEncodeError: - pass - - check_label(label) - label_bytes = _alabel_prefix + _punycode(label) - - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - - return label_bytes - - -def ulabel(label: Union[str, bytes, bytearray]) -> str: - if not isinstance(label, (bytes, bytearray)): - try: - label_bytes = label.encode("ascii") - except UnicodeEncodeError: - check_label(label) - return label - else: - label_bytes = label - - label_bytes = label_bytes.lower() - if label_bytes.startswith(_alabel_prefix): - label_bytes = label_bytes[len(_alabel_prefix) :] - if not label_bytes: - raise IDNAError("Malformed A-label, no Punycode eligible content found") - if label_bytes.decode("ascii")[-1] == "-": - raise IDNAError("A-label must not end with a hyphen") - else: - check_label(label_bytes) - return label_bytes.decode("ascii") - - try: - label = label_bytes.decode("punycode") - except UnicodeError: - raise IDNAError("Invalid A-label") - check_label(label) - return label - - -def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - - output = "" - - for pos, char in enumerate(domain): - code_point = ord(char) - try: - uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement: Optional[str] = None - if len(uts46row) == 3: - replacement = uts46row[2] - if ( - status == "V" - or (status == "D" and not transitional) - or (status == "3" and not std3_rules and replacement is None) - ): - output += char - elif replacement is not None and ( - status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) - ): - output += replacement - elif status != "I": - raise IndexError() - except IndexError: - raise InvalidCodepoint( - "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) - ) - - return unicodedata.normalize("NFC", output) - - -def encode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, - transitional: bool = False, -) -> bytes: - if not isinstance(s, str): - try: - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("should pass a unicode string to the function rather than a byte string.") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split(".") - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if labels[-1] == "": - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append(b"") - s = b".".join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError("Domain too long") - return s - - -def decode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, -) -> str: - try: - if not isinstance(s, str): - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("Invalid ASCII in A-label") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - 
labels = s.split(".") - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append("") - return ".".join(result) diff --git a/venv/lib/python3.12/site-packages/idna/idnadata.py b/venv/lib/python3.12/site-packages/idna/idnadata.py deleted file mode 100644 index 4be60046..00000000 --- a/venv/lib/python3.12/site-packages/idna/idnadata.py +++ /dev/null @@ -1,4243 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = "15.1.0" -scripts = { - "Greek": ( - 0x37000000374, - 0x37500000378, - 0x37A0000037E, - 0x37F00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038B, - 0x38C0000038D, - 0x38E000003A2, - 0x3A3000003E2, - 0x3F000000400, - 0x1D2600001D2B, - 0x1D5D00001D62, - 0x1D6600001D6B, - 0x1DBF00001DC0, - 0x1F0000001F16, - 0x1F1800001F1E, - 0x1F2000001F46, - 0x1F4800001F4E, - 0x1F5000001F58, - 0x1F5900001F5A, - 0x1F5B00001F5C, - 0x1F5D00001F5E, - 0x1F5F00001F7E, - 0x1F8000001FB5, - 0x1FB600001FC5, - 0x1FC600001FD4, - 0x1FD600001FDC, - 0x1FDD00001FF0, - 0x1FF200001FF5, - 0x1FF600001FFF, - 0x212600002127, - 0xAB650000AB66, - 0x101400001018F, - 0x101A0000101A1, - 0x1D2000001D246, - ), - "Han": ( - 0x2E8000002E9A, - 0x2E9B00002EF4, - 0x2F0000002FD6, - 0x300500003006, - 0x300700003008, - 0x30210000302A, - 0x30380000303C, - 0x340000004DC0, - 0x4E000000A000, - 0xF9000000FA6E, - 0xFA700000FADA, - 0x16FE200016FE4, - 0x16FF000016FF2, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x2F8000002FA1E, - 0x300000003134B, - 0x31350000323B0, - ), - "Hebrew": ( - 0x591000005C8, - 0x5D0000005EB, - 0x5EF000005F5, - 0xFB1D0000FB37, - 0xFB380000FB3D, - 0xFB3E0000FB3F, - 0xFB400000FB42, - 0xFB430000FB45, - 0xFB460000FB50, - ), - "Hiragana": ( - 0x304100003097, - 0x309D000030A0, - 0x1B0010001B120, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1F2000001F201, - ), - "Katakana": ( - 0x30A1000030FB, - 0x30FD00003100, - 0x31F000003200, - 0x32D0000032FF, - 0x330000003358, - 0xFF660000FF70, - 0xFF710000FF9E, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B001, - 0x1B1200001B123, - 0x1B1550001B156, - 0x1B1640001B168, - ), -} -joining_types = { - 0xAD: 84, - 0x300: 84, - 0x301: 84, - 0x302: 84, - 0x303: 84, - 0x304: 84, - 0x305: 84, - 0x306: 84, - 0x307: 84, - 0x308: 84, - 0x309: 84, - 0x30A: 84, - 0x30B: 84, - 0x30C: 84, - 0x30D: 84, - 0x30E: 84, - 0x30F: 84, - 0x310: 84, - 0x311: 84, - 0x312: 84, - 0x313: 84, - 0x314: 84, - 0x315: 84, - 0x316: 84, - 0x317: 84, - 0x318: 84, - 0x319: 84, - 0x31A: 84, - 0x31B: 84, - 0x31C: 84, - 0x31D: 84, - 0x31E: 84, - 0x31F: 84, - 0x320: 84, - 0x321: 84, - 0x322: 84, - 0x323: 84, - 0x324: 84, - 0x325: 84, - 0x326: 84, - 0x327: 84, - 0x328: 84, - 0x329: 84, - 0x32A: 84, - 0x32B: 84, - 0x32C: 84, - 0x32D: 84, - 0x32E: 84, - 0x32F: 84, - 0x330: 84, - 0x331: 84, - 0x332: 84, - 0x333: 84, - 0x334: 84, - 0x335: 84, - 0x336: 84, - 0x337: 84, - 0x338: 84, - 0x339: 84, - 0x33A: 84, - 0x33B: 84, - 0x33C: 84, - 0x33D: 84, - 0x33E: 84, - 0x33F: 84, - 0x340: 84, - 0x341: 84, - 0x342: 84, - 0x343: 84, - 0x344: 84, - 0x345: 84, - 0x346: 84, - 0x347: 84, - 0x348: 84, - 0x349: 84, - 0x34A: 84, - 0x34B: 84, - 0x34C: 84, - 0x34D: 84, - 0x34E: 84, - 0x34F: 84, - 0x350: 84, - 0x351: 84, - 0x352: 84, - 0x353: 84, - 0x354: 84, - 0x355: 84, - 0x356: 84, - 0x357: 84, - 0x358: 84, - 0x359: 
84, - 0x35A: 84, - 0x35B: 84, - 0x35C: 84, - 0x35D: 84, - 0x35E: 84, - 0x35F: 84, - 0x360: 84, - 0x361: 84, - 0x362: 84, - 0x363: 84, - 0x364: 84, - 0x365: 84, - 0x366: 84, - 0x367: 84, - 0x368: 84, - 0x369: 84, - 0x36A: 84, - 0x36B: 84, - 0x36C: 84, - 0x36D: 84, - 0x36E: 84, - 0x36F: 84, - 0x483: 84, - 0x484: 84, - 0x485: 84, - 0x486: 84, - 0x487: 84, - 0x488: 84, - 0x489: 84, - 0x591: 84, - 0x592: 84, - 0x593: 84, - 0x594: 84, - 0x595: 84, - 0x596: 84, - 0x597: 84, - 0x598: 84, - 0x599: 84, - 0x59A: 84, - 0x59B: 84, - 0x59C: 84, - 0x59D: 84, - 0x59E: 84, - 0x59F: 84, - 0x5A0: 84, - 0x5A1: 84, - 0x5A2: 84, - 0x5A3: 84, - 0x5A4: 84, - 0x5A5: 84, - 0x5A6: 84, - 0x5A7: 84, - 0x5A8: 84, - 0x5A9: 84, - 0x5AA: 84, - 0x5AB: 84, - 0x5AC: 84, - 0x5AD: 84, - 0x5AE: 84, - 0x5AF: 84, - 0x5B0: 84, - 0x5B1: 84, - 0x5B2: 84, - 0x5B3: 84, - 0x5B4: 84, - 0x5B5: 84, - 0x5B6: 84, - 0x5B7: 84, - 0x5B8: 84, - 0x5B9: 84, - 0x5BA: 84, - 0x5BB: 84, - 0x5BC: 84, - 0x5BD: 84, - 0x5BF: 84, - 0x5C1: 84, - 0x5C2: 84, - 0x5C4: 84, - 0x5C5: 84, - 0x5C7: 84, - 0x610: 84, - 0x611: 84, - 0x612: 84, - 0x613: 84, - 0x614: 84, - 0x615: 84, - 0x616: 84, - 0x617: 84, - 0x618: 84, - 0x619: 84, - 0x61A: 84, - 0x61C: 84, - 0x620: 68, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62A: 68, - 0x62B: 68, - 0x62C: 68, - 0x62D: 68, - 0x62E: 68, - 0x62F: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63A: 68, - 0x63B: 68, - 0x63C: 68, - 0x63D: 68, - 0x63E: 68, - 0x63F: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64A: 68, - 0x64B: 84, - 0x64C: 84, - 0x64D: 84, - 0x64E: 84, - 0x64F: 84, - 0x650: 84, - 0x651: 84, - 0x652: 84, - 0x653: 84, - 0x654: 84, - 0x655: 84, - 0x656: 84, - 0x657: 84, - 0x658: 84, - 0x659: 84, - 0x65A: 84, - 0x65B: 84, - 0x65C: 84, - 0x65D: 84, - 0x65E: 84, - 0x65F: 84, - 0x66E: 68, - 0x66F: 68, - 0x670: 84, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67A: 68, - 0x67B: 68, - 0x67C: 68, - 0x67D: 68, - 0x67E: 68, - 0x67F: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68A: 82, - 0x68B: 82, - 0x68C: 82, - 0x68D: 82, - 0x68E: 82, - 0x68F: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69A: 68, - 0x69B: 68, - 0x69C: 68, - 0x69D: 68, - 0x69E: 68, - 0x69F: 68, - 0x6A0: 68, - 0x6A1: 68, - 0x6A2: 68, - 0x6A3: 68, - 0x6A4: 68, - 0x6A5: 68, - 0x6A6: 68, - 0x6A7: 68, - 0x6A8: 68, - 0x6A9: 68, - 0x6AA: 68, - 0x6AB: 68, - 0x6AC: 68, - 0x6AD: 68, - 0x6AE: 68, - 0x6AF: 68, - 0x6B0: 68, - 0x6B1: 68, - 0x6B2: 68, - 0x6B3: 68, - 0x6B4: 68, - 0x6B5: 68, - 0x6B6: 68, - 0x6B7: 68, - 0x6B8: 68, - 0x6B9: 68, - 0x6BA: 68, - 0x6BB: 68, - 0x6BC: 68, - 0x6BD: 68, - 0x6BE: 68, - 0x6BF: 68, - 0x6C0: 82, - 0x6C1: 68, - 0x6C2: 68, - 0x6C3: 82, - 0x6C4: 82, - 0x6C5: 82, - 0x6C6: 82, - 0x6C7: 82, - 0x6C8: 82, - 0x6C9: 82, - 0x6CA: 82, - 0x6CB: 82, - 0x6CC: 68, - 0x6CD: 82, - 0x6CE: 68, - 0x6CF: 82, - 0x6D0: 68, - 0x6D1: 68, - 0x6D2: 82, - 0x6D3: 82, - 0x6D5: 82, - 0x6D6: 84, - 0x6D7: 84, - 0x6D8: 84, - 0x6D9: 84, - 0x6DA: 84, - 0x6DB: 84, - 0x6DC: 84, - 0x6DF: 84, - 0x6E0: 84, - 0x6E1: 84, - 0x6E2: 84, - 0x6E3: 84, - 0x6E4: 84, - 0x6E7: 84, - 0x6E8: 84, - 0x6EA: 84, - 
0x6EB: 84, - 0x6EC: 84, - 0x6ED: 84, - 0x6EE: 82, - 0x6EF: 82, - 0x6FA: 68, - 0x6FB: 68, - 0x6FC: 68, - 0x6FF: 68, - 0x70F: 84, - 0x710: 82, - 0x711: 84, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71A: 68, - 0x71B: 68, - 0x71C: 68, - 0x71D: 68, - 0x71E: 82, - 0x71F: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72A: 82, - 0x72B: 68, - 0x72C: 82, - 0x72D: 68, - 0x72E: 68, - 0x72F: 82, - 0x730: 84, - 0x731: 84, - 0x732: 84, - 0x733: 84, - 0x734: 84, - 0x735: 84, - 0x736: 84, - 0x737: 84, - 0x738: 84, - 0x739: 84, - 0x73A: 84, - 0x73B: 84, - 0x73C: 84, - 0x73D: 84, - 0x73E: 84, - 0x73F: 84, - 0x740: 84, - 0x741: 84, - 0x742: 84, - 0x743: 84, - 0x744: 84, - 0x745: 84, - 0x746: 84, - 0x747: 84, - 0x748: 84, - 0x749: 84, - 0x74A: 84, - 0x74D: 82, - 0x74E: 68, - 0x74F: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75A: 82, - 0x75B: 82, - 0x75C: 68, - 0x75D: 68, - 0x75E: 68, - 0x75F: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76A: 68, - 0x76B: 82, - 0x76C: 82, - 0x76D: 68, - 0x76E: 68, - 0x76F: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77A: 68, - 0x77B: 68, - 0x77C: 68, - 0x77D: 68, - 0x77E: 68, - 0x77F: 68, - 0x7A6: 84, - 0x7A7: 84, - 0x7A8: 84, - 0x7A9: 84, - 0x7AA: 84, - 0x7AB: 84, - 0x7AC: 84, - 0x7AD: 84, - 0x7AE: 84, - 0x7AF: 84, - 0x7B0: 84, - 0x7CA: 68, - 0x7CB: 68, - 0x7CC: 68, - 0x7CD: 68, - 0x7CE: 68, - 0x7CF: 68, - 0x7D0: 68, - 0x7D1: 68, - 0x7D2: 68, - 0x7D3: 68, - 0x7D4: 68, - 0x7D5: 68, - 0x7D6: 68, - 0x7D7: 68, - 0x7D8: 68, - 0x7D9: 68, - 0x7DA: 68, - 0x7DB: 68, - 0x7DC: 68, - 0x7DD: 68, - 0x7DE: 68, - 0x7DF: 68, - 0x7E0: 68, - 0x7E1: 68, - 0x7E2: 68, - 0x7E3: 68, - 0x7E4: 68, - 0x7E5: 68, - 0x7E6: 68, - 0x7E7: 68, - 0x7E8: 68, - 0x7E9: 68, - 0x7EA: 68, - 0x7EB: 84, - 0x7EC: 84, - 0x7ED: 84, - 0x7EE: 84, - 0x7EF: 84, - 0x7F0: 84, - 0x7F1: 84, - 0x7F2: 84, - 0x7F3: 84, - 0x7FA: 67, - 0x7FD: 84, - 0x816: 84, - 0x817: 84, - 0x818: 84, - 0x819: 84, - 0x81B: 84, - 0x81C: 84, - 0x81D: 84, - 0x81E: 84, - 0x81F: 84, - 0x820: 84, - 0x821: 84, - 0x822: 84, - 0x823: 84, - 0x825: 84, - 0x826: 84, - 0x827: 84, - 0x829: 84, - 0x82A: 84, - 0x82B: 84, - 0x82C: 84, - 0x82D: 84, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 82, - 0x848: 68, - 0x849: 82, - 0x84A: 68, - 0x84B: 68, - 0x84C: 68, - 0x84D: 68, - 0x84E: 68, - 0x84F: 68, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 82, - 0x857: 82, - 0x858: 82, - 0x859: 84, - 0x85A: 84, - 0x85B: 84, - 0x860: 68, - 0x862: 68, - 0x863: 68, - 0x864: 68, - 0x865: 68, - 0x867: 82, - 0x868: 68, - 0x869: 82, - 0x86A: 82, - 0x870: 82, - 0x871: 82, - 0x872: 82, - 0x873: 82, - 0x874: 82, - 0x875: 82, - 0x876: 82, - 0x877: 82, - 0x878: 82, - 0x879: 82, - 0x87A: 82, - 0x87B: 82, - 0x87C: 82, - 0x87D: 82, - 0x87E: 82, - 0x87F: 82, - 0x880: 82, - 0x881: 82, - 0x882: 82, - 0x883: 67, - 0x884: 67, - 0x885: 67, - 0x886: 68, - 0x889: 68, - 0x88A: 68, - 0x88B: 68, - 0x88C: 68, - 0x88D: 68, - 0x88E: 82, - 0x898: 84, - 0x899: 84, - 0x89A: 84, - 0x89B: 84, - 0x89C: 84, - 0x89D: 84, - 0x89E: 84, - 0x89F: 84, - 0x8A0: 68, - 0x8A1: 68, - 0x8A2: 68, - 
0x8A3: 68, - 0x8A4: 68, - 0x8A5: 68, - 0x8A6: 68, - 0x8A7: 68, - 0x8A8: 68, - 0x8A9: 68, - 0x8AA: 82, - 0x8AB: 82, - 0x8AC: 82, - 0x8AE: 82, - 0x8AF: 68, - 0x8B0: 68, - 0x8B1: 82, - 0x8B2: 82, - 0x8B3: 68, - 0x8B4: 68, - 0x8B5: 68, - 0x8B6: 68, - 0x8B7: 68, - 0x8B8: 68, - 0x8B9: 82, - 0x8BA: 68, - 0x8BB: 68, - 0x8BC: 68, - 0x8BD: 68, - 0x8BE: 68, - 0x8BF: 68, - 0x8C0: 68, - 0x8C1: 68, - 0x8C2: 68, - 0x8C3: 68, - 0x8C4: 68, - 0x8C5: 68, - 0x8C6: 68, - 0x8C7: 68, - 0x8C8: 68, - 0x8CA: 84, - 0x8CB: 84, - 0x8CC: 84, - 0x8CD: 84, - 0x8CE: 84, - 0x8CF: 84, - 0x8D0: 84, - 0x8D1: 84, - 0x8D2: 84, - 0x8D3: 84, - 0x8D4: 84, - 0x8D5: 84, - 0x8D6: 84, - 0x8D7: 84, - 0x8D8: 84, - 0x8D9: 84, - 0x8DA: 84, - 0x8DB: 84, - 0x8DC: 84, - 0x8DD: 84, - 0x8DE: 84, - 0x8DF: 84, - 0x8E0: 84, - 0x8E1: 84, - 0x8E3: 84, - 0x8E4: 84, - 0x8E5: 84, - 0x8E6: 84, - 0x8E7: 84, - 0x8E8: 84, - 0x8E9: 84, - 0x8EA: 84, - 0x8EB: 84, - 0x8EC: 84, - 0x8ED: 84, - 0x8EE: 84, - 0x8EF: 84, - 0x8F0: 84, - 0x8F1: 84, - 0x8F2: 84, - 0x8F3: 84, - 0x8F4: 84, - 0x8F5: 84, - 0x8F6: 84, - 0x8F7: 84, - 0x8F8: 84, - 0x8F9: 84, - 0x8FA: 84, - 0x8FB: 84, - 0x8FC: 84, - 0x8FD: 84, - 0x8FE: 84, - 0x8FF: 84, - 0x900: 84, - 0x901: 84, - 0x902: 84, - 0x93A: 84, - 0x93C: 84, - 0x941: 84, - 0x942: 84, - 0x943: 84, - 0x944: 84, - 0x945: 84, - 0x946: 84, - 0x947: 84, - 0x948: 84, - 0x94D: 84, - 0x951: 84, - 0x952: 84, - 0x953: 84, - 0x954: 84, - 0x955: 84, - 0x956: 84, - 0x957: 84, - 0x962: 84, - 0x963: 84, - 0x981: 84, - 0x9BC: 84, - 0x9C1: 84, - 0x9C2: 84, - 0x9C3: 84, - 0x9C4: 84, - 0x9CD: 84, - 0x9E2: 84, - 0x9E3: 84, - 0x9FE: 84, - 0xA01: 84, - 0xA02: 84, - 0xA3C: 84, - 0xA41: 84, - 0xA42: 84, - 0xA47: 84, - 0xA48: 84, - 0xA4B: 84, - 0xA4C: 84, - 0xA4D: 84, - 0xA51: 84, - 0xA70: 84, - 0xA71: 84, - 0xA75: 84, - 0xA81: 84, - 0xA82: 84, - 0xABC: 84, - 0xAC1: 84, - 0xAC2: 84, - 0xAC3: 84, - 0xAC4: 84, - 0xAC5: 84, - 0xAC7: 84, - 0xAC8: 84, - 0xACD: 84, - 0xAE2: 84, - 0xAE3: 84, - 0xAFA: 84, - 0xAFB: 84, - 0xAFC: 84, - 0xAFD: 84, - 0xAFE: 84, - 0xAFF: 84, - 0xB01: 84, - 0xB3C: 84, - 0xB3F: 84, - 0xB41: 84, - 0xB42: 84, - 0xB43: 84, - 0xB44: 84, - 0xB4D: 84, - 0xB55: 84, - 0xB56: 84, - 0xB62: 84, - 0xB63: 84, - 0xB82: 84, - 0xBC0: 84, - 0xBCD: 84, - 0xC00: 84, - 0xC04: 84, - 0xC3C: 84, - 0xC3E: 84, - 0xC3F: 84, - 0xC40: 84, - 0xC46: 84, - 0xC47: 84, - 0xC48: 84, - 0xC4A: 84, - 0xC4B: 84, - 0xC4C: 84, - 0xC4D: 84, - 0xC55: 84, - 0xC56: 84, - 0xC62: 84, - 0xC63: 84, - 0xC81: 84, - 0xCBC: 84, - 0xCBF: 84, - 0xCC6: 84, - 0xCCC: 84, - 0xCCD: 84, - 0xCE2: 84, - 0xCE3: 84, - 0xD00: 84, - 0xD01: 84, - 0xD3B: 84, - 0xD3C: 84, - 0xD41: 84, - 0xD42: 84, - 0xD43: 84, - 0xD44: 84, - 0xD4D: 84, - 0xD62: 84, - 0xD63: 84, - 0xD81: 84, - 0xDCA: 84, - 0xDD2: 84, - 0xDD3: 84, - 0xDD4: 84, - 0xDD6: 84, - 0xE31: 84, - 0xE34: 84, - 0xE35: 84, - 0xE36: 84, - 0xE37: 84, - 0xE38: 84, - 0xE39: 84, - 0xE3A: 84, - 0xE47: 84, - 0xE48: 84, - 0xE49: 84, - 0xE4A: 84, - 0xE4B: 84, - 0xE4C: 84, - 0xE4D: 84, - 0xE4E: 84, - 0xEB1: 84, - 0xEB4: 84, - 0xEB5: 84, - 0xEB6: 84, - 0xEB7: 84, - 0xEB8: 84, - 0xEB9: 84, - 0xEBA: 84, - 0xEBB: 84, - 0xEBC: 84, - 0xEC8: 84, - 0xEC9: 84, - 0xECA: 84, - 0xECB: 84, - 0xECC: 84, - 0xECD: 84, - 0xECE: 84, - 0xF18: 84, - 0xF19: 84, - 0xF35: 84, - 0xF37: 84, - 0xF39: 84, - 0xF71: 84, - 0xF72: 84, - 0xF73: 84, - 0xF74: 84, - 0xF75: 84, - 0xF76: 84, - 0xF77: 84, - 0xF78: 84, - 0xF79: 84, - 0xF7A: 84, - 0xF7B: 84, - 0xF7C: 84, - 0xF7D: 84, - 0xF7E: 84, - 0xF80: 84, - 0xF81: 84, - 0xF82: 84, - 0xF83: 84, - 0xF84: 84, - 0xF86: 84, - 0xF87: 84, - 0xF8D: 84, - 
0xF8E: 84, - 0xF8F: 84, - 0xF90: 84, - 0xF91: 84, - 0xF92: 84, - 0xF93: 84, - 0xF94: 84, - 0xF95: 84, - 0xF96: 84, - 0xF97: 84, - 0xF99: 84, - 0xF9A: 84, - 0xF9B: 84, - 0xF9C: 84, - 0xF9D: 84, - 0xF9E: 84, - 0xF9F: 84, - 0xFA0: 84, - 0xFA1: 84, - 0xFA2: 84, - 0xFA3: 84, - 0xFA4: 84, - 0xFA5: 84, - 0xFA6: 84, - 0xFA7: 84, - 0xFA8: 84, - 0xFA9: 84, - 0xFAA: 84, - 0xFAB: 84, - 0xFAC: 84, - 0xFAD: 84, - 0xFAE: 84, - 0xFAF: 84, - 0xFB0: 84, - 0xFB1: 84, - 0xFB2: 84, - 0xFB3: 84, - 0xFB4: 84, - 0xFB5: 84, - 0xFB6: 84, - 0xFB7: 84, - 0xFB8: 84, - 0xFB9: 84, - 0xFBA: 84, - 0xFBB: 84, - 0xFBC: 84, - 0xFC6: 84, - 0x102D: 84, - 0x102E: 84, - 0x102F: 84, - 0x1030: 84, - 0x1032: 84, - 0x1033: 84, - 0x1034: 84, - 0x1035: 84, - 0x1036: 84, - 0x1037: 84, - 0x1039: 84, - 0x103A: 84, - 0x103D: 84, - 0x103E: 84, - 0x1058: 84, - 0x1059: 84, - 0x105E: 84, - 0x105F: 84, - 0x1060: 84, - 0x1071: 84, - 0x1072: 84, - 0x1073: 84, - 0x1074: 84, - 0x1082: 84, - 0x1085: 84, - 0x1086: 84, - 0x108D: 84, - 0x109D: 84, - 0x135D: 84, - 0x135E: 84, - 0x135F: 84, - 0x1712: 84, - 0x1713: 84, - 0x1714: 84, - 0x1732: 84, - 0x1733: 84, - 0x1752: 84, - 0x1753: 84, - 0x1772: 84, - 0x1773: 84, - 0x17B4: 84, - 0x17B5: 84, - 0x17B7: 84, - 0x17B8: 84, - 0x17B9: 84, - 0x17BA: 84, - 0x17BB: 84, - 0x17BC: 84, - 0x17BD: 84, - 0x17C6: 84, - 0x17C9: 84, - 0x17CA: 84, - 0x17CB: 84, - 0x17CC: 84, - 0x17CD: 84, - 0x17CE: 84, - 0x17CF: 84, - 0x17D0: 84, - 0x17D1: 84, - 0x17D2: 84, - 0x17D3: 84, - 0x17DD: 84, - 0x1807: 68, - 0x180A: 67, - 0x180B: 84, - 0x180C: 84, - 0x180D: 84, - 0x180F: 84, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182A: 68, - 0x182B: 68, - 0x182C: 68, - 0x182D: 68, - 0x182E: 68, - 0x182F: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183A: 68, - 0x183B: 68, - 0x183C: 68, - 0x183D: 68, - 0x183E: 68, - 0x183F: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184A: 68, - 0x184B: 68, - 0x184C: 68, - 0x184D: 68, - 0x184E: 68, - 0x184F: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185A: 68, - 0x185B: 68, - 0x185C: 68, - 0x185D: 68, - 0x185E: 68, - 0x185F: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186A: 68, - 0x186B: 68, - 0x186C: 68, - 0x186D: 68, - 0x186E: 68, - 0x186F: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1878: 68, - 0x1885: 84, - 0x1886: 84, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188A: 68, - 0x188B: 68, - 0x188C: 68, - 0x188D: 68, - 0x188E: 68, - 0x188F: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 0x1898: 68, - 0x1899: 68, - 0x189A: 68, - 0x189B: 68, - 0x189C: 68, - 0x189D: 68, - 0x189E: 68, - 0x189F: 68, - 0x18A0: 68, - 0x18A1: 68, - 0x18A2: 68, - 0x18A3: 68, - 0x18A4: 68, - 0x18A5: 68, - 0x18A6: 68, - 0x18A7: 68, - 0x18A8: 68, - 0x18A9: 84, - 0x18AA: 68, - 0x1920: 84, - 0x1921: 84, - 0x1922: 84, - 0x1927: 84, - 0x1928: 84, - 0x1932: 84, - 0x1939: 84, - 0x193A: 84, - 0x193B: 84, - 0x1A17: 84, - 0x1A18: 84, - 0x1A1B: 84, - 0x1A56: 84, - 0x1A58: 84, - 0x1A59: 84, - 
0x1A5A: 84, - 0x1A5B: 84, - 0x1A5C: 84, - 0x1A5D: 84, - 0x1A5E: 84, - 0x1A60: 84, - 0x1A62: 84, - 0x1A65: 84, - 0x1A66: 84, - 0x1A67: 84, - 0x1A68: 84, - 0x1A69: 84, - 0x1A6A: 84, - 0x1A6B: 84, - 0x1A6C: 84, - 0x1A73: 84, - 0x1A74: 84, - 0x1A75: 84, - 0x1A76: 84, - 0x1A77: 84, - 0x1A78: 84, - 0x1A79: 84, - 0x1A7A: 84, - 0x1A7B: 84, - 0x1A7C: 84, - 0x1A7F: 84, - 0x1AB0: 84, - 0x1AB1: 84, - 0x1AB2: 84, - 0x1AB3: 84, - 0x1AB4: 84, - 0x1AB5: 84, - 0x1AB6: 84, - 0x1AB7: 84, - 0x1AB8: 84, - 0x1AB9: 84, - 0x1ABA: 84, - 0x1ABB: 84, - 0x1ABC: 84, - 0x1ABD: 84, - 0x1ABE: 84, - 0x1ABF: 84, - 0x1AC0: 84, - 0x1AC1: 84, - 0x1AC2: 84, - 0x1AC3: 84, - 0x1AC4: 84, - 0x1AC5: 84, - 0x1AC6: 84, - 0x1AC7: 84, - 0x1AC8: 84, - 0x1AC9: 84, - 0x1ACA: 84, - 0x1ACB: 84, - 0x1ACC: 84, - 0x1ACD: 84, - 0x1ACE: 84, - 0x1B00: 84, - 0x1B01: 84, - 0x1B02: 84, - 0x1B03: 84, - 0x1B34: 84, - 0x1B36: 84, - 0x1B37: 84, - 0x1B38: 84, - 0x1B39: 84, - 0x1B3A: 84, - 0x1B3C: 84, - 0x1B42: 84, - 0x1B6B: 84, - 0x1B6C: 84, - 0x1B6D: 84, - 0x1B6E: 84, - 0x1B6F: 84, - 0x1B70: 84, - 0x1B71: 84, - 0x1B72: 84, - 0x1B73: 84, - 0x1B80: 84, - 0x1B81: 84, - 0x1BA2: 84, - 0x1BA3: 84, - 0x1BA4: 84, - 0x1BA5: 84, - 0x1BA8: 84, - 0x1BA9: 84, - 0x1BAB: 84, - 0x1BAC: 84, - 0x1BAD: 84, - 0x1BE6: 84, - 0x1BE8: 84, - 0x1BE9: 84, - 0x1BED: 84, - 0x1BEF: 84, - 0x1BF0: 84, - 0x1BF1: 84, - 0x1C2C: 84, - 0x1C2D: 84, - 0x1C2E: 84, - 0x1C2F: 84, - 0x1C30: 84, - 0x1C31: 84, - 0x1C32: 84, - 0x1C33: 84, - 0x1C36: 84, - 0x1C37: 84, - 0x1CD0: 84, - 0x1CD1: 84, - 0x1CD2: 84, - 0x1CD4: 84, - 0x1CD5: 84, - 0x1CD6: 84, - 0x1CD7: 84, - 0x1CD8: 84, - 0x1CD9: 84, - 0x1CDA: 84, - 0x1CDB: 84, - 0x1CDC: 84, - 0x1CDD: 84, - 0x1CDE: 84, - 0x1CDF: 84, - 0x1CE0: 84, - 0x1CE2: 84, - 0x1CE3: 84, - 0x1CE4: 84, - 0x1CE5: 84, - 0x1CE6: 84, - 0x1CE7: 84, - 0x1CE8: 84, - 0x1CED: 84, - 0x1CF4: 84, - 0x1CF8: 84, - 0x1CF9: 84, - 0x1DC0: 84, - 0x1DC1: 84, - 0x1DC2: 84, - 0x1DC3: 84, - 0x1DC4: 84, - 0x1DC5: 84, - 0x1DC6: 84, - 0x1DC7: 84, - 0x1DC8: 84, - 0x1DC9: 84, - 0x1DCA: 84, - 0x1DCB: 84, - 0x1DCC: 84, - 0x1DCD: 84, - 0x1DCE: 84, - 0x1DCF: 84, - 0x1DD0: 84, - 0x1DD1: 84, - 0x1DD2: 84, - 0x1DD3: 84, - 0x1DD4: 84, - 0x1DD5: 84, - 0x1DD6: 84, - 0x1DD7: 84, - 0x1DD8: 84, - 0x1DD9: 84, - 0x1DDA: 84, - 0x1DDB: 84, - 0x1DDC: 84, - 0x1DDD: 84, - 0x1DDE: 84, - 0x1DDF: 84, - 0x1DE0: 84, - 0x1DE1: 84, - 0x1DE2: 84, - 0x1DE3: 84, - 0x1DE4: 84, - 0x1DE5: 84, - 0x1DE6: 84, - 0x1DE7: 84, - 0x1DE8: 84, - 0x1DE9: 84, - 0x1DEA: 84, - 0x1DEB: 84, - 0x1DEC: 84, - 0x1DED: 84, - 0x1DEE: 84, - 0x1DEF: 84, - 0x1DF0: 84, - 0x1DF1: 84, - 0x1DF2: 84, - 0x1DF3: 84, - 0x1DF4: 84, - 0x1DF5: 84, - 0x1DF6: 84, - 0x1DF7: 84, - 0x1DF8: 84, - 0x1DF9: 84, - 0x1DFA: 84, - 0x1DFB: 84, - 0x1DFC: 84, - 0x1DFD: 84, - 0x1DFE: 84, - 0x1DFF: 84, - 0x200B: 84, - 0x200D: 67, - 0x200E: 84, - 0x200F: 84, - 0x202A: 84, - 0x202B: 84, - 0x202C: 84, - 0x202D: 84, - 0x202E: 84, - 0x2060: 84, - 0x2061: 84, - 0x2062: 84, - 0x2063: 84, - 0x2064: 84, - 0x206A: 84, - 0x206B: 84, - 0x206C: 84, - 0x206D: 84, - 0x206E: 84, - 0x206F: 84, - 0x20D0: 84, - 0x20D1: 84, - 0x20D2: 84, - 0x20D3: 84, - 0x20D4: 84, - 0x20D5: 84, - 0x20D6: 84, - 0x20D7: 84, - 0x20D8: 84, - 0x20D9: 84, - 0x20DA: 84, - 0x20DB: 84, - 0x20DC: 84, - 0x20DD: 84, - 0x20DE: 84, - 0x20DF: 84, - 0x20E0: 84, - 0x20E1: 84, - 0x20E2: 84, - 0x20E3: 84, - 0x20E4: 84, - 0x20E5: 84, - 0x20E6: 84, - 0x20E7: 84, - 0x20E8: 84, - 0x20E9: 84, - 0x20EA: 84, - 0x20EB: 84, - 0x20EC: 84, - 0x20ED: 84, - 0x20EE: 84, - 0x20EF: 84, - 0x20F0: 84, - 0x2CEF: 84, - 0x2CF0: 84, - 0x2CF1: 84, - 0x2D7F: 84, 
- 0x2DE0: 84, - 0x2DE1: 84, - 0x2DE2: 84, - 0x2DE3: 84, - 0x2DE4: 84, - 0x2DE5: 84, - 0x2DE6: 84, - 0x2DE7: 84, - 0x2DE8: 84, - 0x2DE9: 84, - 0x2DEA: 84, - 0x2DEB: 84, - 0x2DEC: 84, - 0x2DED: 84, - 0x2DEE: 84, - 0x2DEF: 84, - 0x2DF0: 84, - 0x2DF1: 84, - 0x2DF2: 84, - 0x2DF3: 84, - 0x2DF4: 84, - 0x2DF5: 84, - 0x2DF6: 84, - 0x2DF7: 84, - 0x2DF8: 84, - 0x2DF9: 84, - 0x2DFA: 84, - 0x2DFB: 84, - 0x2DFC: 84, - 0x2DFD: 84, - 0x2DFE: 84, - 0x2DFF: 84, - 0x302A: 84, - 0x302B: 84, - 0x302C: 84, - 0x302D: 84, - 0x3099: 84, - 0x309A: 84, - 0xA66F: 84, - 0xA670: 84, - 0xA671: 84, - 0xA672: 84, - 0xA674: 84, - 0xA675: 84, - 0xA676: 84, - 0xA677: 84, - 0xA678: 84, - 0xA679: 84, - 0xA67A: 84, - 0xA67B: 84, - 0xA67C: 84, - 0xA67D: 84, - 0xA69E: 84, - 0xA69F: 84, - 0xA6F0: 84, - 0xA6F1: 84, - 0xA802: 84, - 0xA806: 84, - 0xA80B: 84, - 0xA825: 84, - 0xA826: 84, - 0xA82C: 84, - 0xA840: 68, - 0xA841: 68, - 0xA842: 68, - 0xA843: 68, - 0xA844: 68, - 0xA845: 68, - 0xA846: 68, - 0xA847: 68, - 0xA848: 68, - 0xA849: 68, - 0xA84A: 68, - 0xA84B: 68, - 0xA84C: 68, - 0xA84D: 68, - 0xA84E: 68, - 0xA84F: 68, - 0xA850: 68, - 0xA851: 68, - 0xA852: 68, - 0xA853: 68, - 0xA854: 68, - 0xA855: 68, - 0xA856: 68, - 0xA857: 68, - 0xA858: 68, - 0xA859: 68, - 0xA85A: 68, - 0xA85B: 68, - 0xA85C: 68, - 0xA85D: 68, - 0xA85E: 68, - 0xA85F: 68, - 0xA860: 68, - 0xA861: 68, - 0xA862: 68, - 0xA863: 68, - 0xA864: 68, - 0xA865: 68, - 0xA866: 68, - 0xA867: 68, - 0xA868: 68, - 0xA869: 68, - 0xA86A: 68, - 0xA86B: 68, - 0xA86C: 68, - 0xA86D: 68, - 0xA86E: 68, - 0xA86F: 68, - 0xA870: 68, - 0xA871: 68, - 0xA872: 76, - 0xA8C4: 84, - 0xA8C5: 84, - 0xA8E0: 84, - 0xA8E1: 84, - 0xA8E2: 84, - 0xA8E3: 84, - 0xA8E4: 84, - 0xA8E5: 84, - 0xA8E6: 84, - 0xA8E7: 84, - 0xA8E8: 84, - 0xA8E9: 84, - 0xA8EA: 84, - 0xA8EB: 84, - 0xA8EC: 84, - 0xA8ED: 84, - 0xA8EE: 84, - 0xA8EF: 84, - 0xA8F0: 84, - 0xA8F1: 84, - 0xA8FF: 84, - 0xA926: 84, - 0xA927: 84, - 0xA928: 84, - 0xA929: 84, - 0xA92A: 84, - 0xA92B: 84, - 0xA92C: 84, - 0xA92D: 84, - 0xA947: 84, - 0xA948: 84, - 0xA949: 84, - 0xA94A: 84, - 0xA94B: 84, - 0xA94C: 84, - 0xA94D: 84, - 0xA94E: 84, - 0xA94F: 84, - 0xA950: 84, - 0xA951: 84, - 0xA980: 84, - 0xA981: 84, - 0xA982: 84, - 0xA9B3: 84, - 0xA9B6: 84, - 0xA9B7: 84, - 0xA9B8: 84, - 0xA9B9: 84, - 0xA9BC: 84, - 0xA9BD: 84, - 0xA9E5: 84, - 0xAA29: 84, - 0xAA2A: 84, - 0xAA2B: 84, - 0xAA2C: 84, - 0xAA2D: 84, - 0xAA2E: 84, - 0xAA31: 84, - 0xAA32: 84, - 0xAA35: 84, - 0xAA36: 84, - 0xAA43: 84, - 0xAA4C: 84, - 0xAA7C: 84, - 0xAAB0: 84, - 0xAAB2: 84, - 0xAAB3: 84, - 0xAAB4: 84, - 0xAAB7: 84, - 0xAAB8: 84, - 0xAABE: 84, - 0xAABF: 84, - 0xAAC1: 84, - 0xAAEC: 84, - 0xAAED: 84, - 0xAAF6: 84, - 0xABE5: 84, - 0xABE8: 84, - 0xABED: 84, - 0xFB1E: 84, - 0xFE00: 84, - 0xFE01: 84, - 0xFE02: 84, - 0xFE03: 84, - 0xFE04: 84, - 0xFE05: 84, - 0xFE06: 84, - 0xFE07: 84, - 0xFE08: 84, - 0xFE09: 84, - 0xFE0A: 84, - 0xFE0B: 84, - 0xFE0C: 84, - 0xFE0D: 84, - 0xFE0E: 84, - 0xFE0F: 84, - 0xFE20: 84, - 0xFE21: 84, - 0xFE22: 84, - 0xFE23: 84, - 0xFE24: 84, - 0xFE25: 84, - 0xFE26: 84, - 0xFE27: 84, - 0xFE28: 84, - 0xFE29: 84, - 0xFE2A: 84, - 0xFE2B: 84, - 0xFE2C: 84, - 0xFE2D: 84, - 0xFE2E: 84, - 0xFE2F: 84, - 0xFEFF: 84, - 0xFFF9: 84, - 0xFFFA: 84, - 0xFFFB: 84, - 0x101FD: 84, - 0x102E0: 84, - 0x10376: 84, - 0x10377: 84, - 0x10378: 84, - 0x10379: 84, - 0x1037A: 84, - 0x10A01: 84, - 0x10A02: 84, - 0x10A03: 84, - 0x10A05: 84, - 0x10A06: 84, - 0x10A0C: 84, - 0x10A0D: 84, - 0x10A0E: 84, - 0x10A0F: 84, - 0x10A38: 84, - 0x10A39: 84, - 0x10A3A: 84, - 0x10A3F: 84, - 0x10AC0: 68, - 0x10AC1: 68, - 0x10AC2: 68, - 
0x10AC3: 68, - 0x10AC4: 68, - 0x10AC5: 82, - 0x10AC7: 82, - 0x10AC9: 82, - 0x10ACA: 82, - 0x10ACD: 76, - 0x10ACE: 82, - 0x10ACF: 82, - 0x10AD0: 82, - 0x10AD1: 82, - 0x10AD2: 82, - 0x10AD3: 68, - 0x10AD4: 68, - 0x10AD5: 68, - 0x10AD6: 68, - 0x10AD7: 76, - 0x10AD8: 68, - 0x10AD9: 68, - 0x10ADA: 68, - 0x10ADB: 68, - 0x10ADC: 68, - 0x10ADD: 82, - 0x10ADE: 68, - 0x10ADF: 68, - 0x10AE0: 68, - 0x10AE1: 82, - 0x10AE4: 82, - 0x10AE5: 84, - 0x10AE6: 84, - 0x10AEB: 68, - 0x10AEC: 68, - 0x10AED: 68, - 0x10AEE: 68, - 0x10AEF: 82, - 0x10B80: 68, - 0x10B81: 82, - 0x10B82: 68, - 0x10B83: 82, - 0x10B84: 82, - 0x10B85: 82, - 0x10B86: 68, - 0x10B87: 68, - 0x10B88: 68, - 0x10B89: 82, - 0x10B8A: 68, - 0x10B8B: 68, - 0x10B8C: 82, - 0x10B8D: 68, - 0x10B8E: 82, - 0x10B8F: 82, - 0x10B90: 68, - 0x10B91: 82, - 0x10BA9: 82, - 0x10BAA: 82, - 0x10BAB: 82, - 0x10BAC: 82, - 0x10BAD: 68, - 0x10BAE: 68, - 0x10D00: 76, - 0x10D01: 68, - 0x10D02: 68, - 0x10D03: 68, - 0x10D04: 68, - 0x10D05: 68, - 0x10D06: 68, - 0x10D07: 68, - 0x10D08: 68, - 0x10D09: 68, - 0x10D0A: 68, - 0x10D0B: 68, - 0x10D0C: 68, - 0x10D0D: 68, - 0x10D0E: 68, - 0x10D0F: 68, - 0x10D10: 68, - 0x10D11: 68, - 0x10D12: 68, - 0x10D13: 68, - 0x10D14: 68, - 0x10D15: 68, - 0x10D16: 68, - 0x10D17: 68, - 0x10D18: 68, - 0x10D19: 68, - 0x10D1A: 68, - 0x10D1B: 68, - 0x10D1C: 68, - 0x10D1D: 68, - 0x10D1E: 68, - 0x10D1F: 68, - 0x10D20: 68, - 0x10D21: 68, - 0x10D22: 82, - 0x10D23: 68, - 0x10D24: 84, - 0x10D25: 84, - 0x10D26: 84, - 0x10D27: 84, - 0x10EAB: 84, - 0x10EAC: 84, - 0x10EFD: 84, - 0x10EFE: 84, - 0x10EFF: 84, - 0x10F30: 68, - 0x10F31: 68, - 0x10F32: 68, - 0x10F33: 82, - 0x10F34: 68, - 0x10F35: 68, - 0x10F36: 68, - 0x10F37: 68, - 0x10F38: 68, - 0x10F39: 68, - 0x10F3A: 68, - 0x10F3B: 68, - 0x10F3C: 68, - 0x10F3D: 68, - 0x10F3E: 68, - 0x10F3F: 68, - 0x10F40: 68, - 0x10F41: 68, - 0x10F42: 68, - 0x10F43: 68, - 0x10F44: 68, - 0x10F46: 84, - 0x10F47: 84, - 0x10F48: 84, - 0x10F49: 84, - 0x10F4A: 84, - 0x10F4B: 84, - 0x10F4C: 84, - 0x10F4D: 84, - 0x10F4E: 84, - 0x10F4F: 84, - 0x10F50: 84, - 0x10F51: 68, - 0x10F52: 68, - 0x10F53: 68, - 0x10F54: 82, - 0x10F70: 68, - 0x10F71: 68, - 0x10F72: 68, - 0x10F73: 68, - 0x10F74: 82, - 0x10F75: 82, - 0x10F76: 68, - 0x10F77: 68, - 0x10F78: 68, - 0x10F79: 68, - 0x10F7A: 68, - 0x10F7B: 68, - 0x10F7C: 68, - 0x10F7D: 68, - 0x10F7E: 68, - 0x10F7F: 68, - 0x10F80: 68, - 0x10F81: 68, - 0x10F82: 84, - 0x10F83: 84, - 0x10F84: 84, - 0x10F85: 84, - 0x10FB0: 68, - 0x10FB2: 68, - 0x10FB3: 68, - 0x10FB4: 82, - 0x10FB5: 82, - 0x10FB6: 82, - 0x10FB8: 68, - 0x10FB9: 82, - 0x10FBA: 82, - 0x10FBB: 68, - 0x10FBC: 68, - 0x10FBD: 82, - 0x10FBE: 68, - 0x10FBF: 68, - 0x10FC1: 68, - 0x10FC2: 82, - 0x10FC3: 82, - 0x10FC4: 68, - 0x10FC9: 82, - 0x10FCA: 68, - 0x10FCB: 76, - 0x11001: 84, - 0x11038: 84, - 0x11039: 84, - 0x1103A: 84, - 0x1103B: 84, - 0x1103C: 84, - 0x1103D: 84, - 0x1103E: 84, - 0x1103F: 84, - 0x11040: 84, - 0x11041: 84, - 0x11042: 84, - 0x11043: 84, - 0x11044: 84, - 0x11045: 84, - 0x11046: 84, - 0x11070: 84, - 0x11073: 84, - 0x11074: 84, - 0x1107F: 84, - 0x11080: 84, - 0x11081: 84, - 0x110B3: 84, - 0x110B4: 84, - 0x110B5: 84, - 0x110B6: 84, - 0x110B9: 84, - 0x110BA: 84, - 0x110C2: 84, - 0x11100: 84, - 0x11101: 84, - 0x11102: 84, - 0x11127: 84, - 0x11128: 84, - 0x11129: 84, - 0x1112A: 84, - 0x1112B: 84, - 0x1112D: 84, - 0x1112E: 84, - 0x1112F: 84, - 0x11130: 84, - 0x11131: 84, - 0x11132: 84, - 0x11133: 84, - 0x11134: 84, - 0x11173: 84, - 0x11180: 84, - 0x11181: 84, - 0x111B6: 84, - 0x111B7: 84, - 0x111B8: 84, - 0x111B9: 84, - 0x111BA: 84, - 0x111BB: 84, - 
0x111BC: 84, - 0x111BD: 84, - 0x111BE: 84, - 0x111C9: 84, - 0x111CA: 84, - 0x111CB: 84, - 0x111CC: 84, - 0x111CF: 84, - 0x1122F: 84, - 0x11230: 84, - 0x11231: 84, - 0x11234: 84, - 0x11236: 84, - 0x11237: 84, - 0x1123E: 84, - 0x11241: 84, - 0x112DF: 84, - 0x112E3: 84, - 0x112E4: 84, - 0x112E5: 84, - 0x112E6: 84, - 0x112E7: 84, - 0x112E8: 84, - 0x112E9: 84, - 0x112EA: 84, - 0x11300: 84, - 0x11301: 84, - 0x1133B: 84, - 0x1133C: 84, - 0x11340: 84, - 0x11366: 84, - 0x11367: 84, - 0x11368: 84, - 0x11369: 84, - 0x1136A: 84, - 0x1136B: 84, - 0x1136C: 84, - 0x11370: 84, - 0x11371: 84, - 0x11372: 84, - 0x11373: 84, - 0x11374: 84, - 0x11438: 84, - 0x11439: 84, - 0x1143A: 84, - 0x1143B: 84, - 0x1143C: 84, - 0x1143D: 84, - 0x1143E: 84, - 0x1143F: 84, - 0x11442: 84, - 0x11443: 84, - 0x11444: 84, - 0x11446: 84, - 0x1145E: 84, - 0x114B3: 84, - 0x114B4: 84, - 0x114B5: 84, - 0x114B6: 84, - 0x114B7: 84, - 0x114B8: 84, - 0x114BA: 84, - 0x114BF: 84, - 0x114C0: 84, - 0x114C2: 84, - 0x114C3: 84, - 0x115B2: 84, - 0x115B3: 84, - 0x115B4: 84, - 0x115B5: 84, - 0x115BC: 84, - 0x115BD: 84, - 0x115BF: 84, - 0x115C0: 84, - 0x115DC: 84, - 0x115DD: 84, - 0x11633: 84, - 0x11634: 84, - 0x11635: 84, - 0x11636: 84, - 0x11637: 84, - 0x11638: 84, - 0x11639: 84, - 0x1163A: 84, - 0x1163D: 84, - 0x1163F: 84, - 0x11640: 84, - 0x116AB: 84, - 0x116AD: 84, - 0x116B0: 84, - 0x116B1: 84, - 0x116B2: 84, - 0x116B3: 84, - 0x116B4: 84, - 0x116B5: 84, - 0x116B7: 84, - 0x1171D: 84, - 0x1171E: 84, - 0x1171F: 84, - 0x11722: 84, - 0x11723: 84, - 0x11724: 84, - 0x11725: 84, - 0x11727: 84, - 0x11728: 84, - 0x11729: 84, - 0x1172A: 84, - 0x1172B: 84, - 0x1182F: 84, - 0x11830: 84, - 0x11831: 84, - 0x11832: 84, - 0x11833: 84, - 0x11834: 84, - 0x11835: 84, - 0x11836: 84, - 0x11837: 84, - 0x11839: 84, - 0x1183A: 84, - 0x1193B: 84, - 0x1193C: 84, - 0x1193E: 84, - 0x11943: 84, - 0x119D4: 84, - 0x119D5: 84, - 0x119D6: 84, - 0x119D7: 84, - 0x119DA: 84, - 0x119DB: 84, - 0x119E0: 84, - 0x11A01: 84, - 0x11A02: 84, - 0x11A03: 84, - 0x11A04: 84, - 0x11A05: 84, - 0x11A06: 84, - 0x11A07: 84, - 0x11A08: 84, - 0x11A09: 84, - 0x11A0A: 84, - 0x11A33: 84, - 0x11A34: 84, - 0x11A35: 84, - 0x11A36: 84, - 0x11A37: 84, - 0x11A38: 84, - 0x11A3B: 84, - 0x11A3C: 84, - 0x11A3D: 84, - 0x11A3E: 84, - 0x11A47: 84, - 0x11A51: 84, - 0x11A52: 84, - 0x11A53: 84, - 0x11A54: 84, - 0x11A55: 84, - 0x11A56: 84, - 0x11A59: 84, - 0x11A5A: 84, - 0x11A5B: 84, - 0x11A8A: 84, - 0x11A8B: 84, - 0x11A8C: 84, - 0x11A8D: 84, - 0x11A8E: 84, - 0x11A8F: 84, - 0x11A90: 84, - 0x11A91: 84, - 0x11A92: 84, - 0x11A93: 84, - 0x11A94: 84, - 0x11A95: 84, - 0x11A96: 84, - 0x11A98: 84, - 0x11A99: 84, - 0x11C30: 84, - 0x11C31: 84, - 0x11C32: 84, - 0x11C33: 84, - 0x11C34: 84, - 0x11C35: 84, - 0x11C36: 84, - 0x11C38: 84, - 0x11C39: 84, - 0x11C3A: 84, - 0x11C3B: 84, - 0x11C3C: 84, - 0x11C3D: 84, - 0x11C3F: 84, - 0x11C92: 84, - 0x11C93: 84, - 0x11C94: 84, - 0x11C95: 84, - 0x11C96: 84, - 0x11C97: 84, - 0x11C98: 84, - 0x11C99: 84, - 0x11C9A: 84, - 0x11C9B: 84, - 0x11C9C: 84, - 0x11C9D: 84, - 0x11C9E: 84, - 0x11C9F: 84, - 0x11CA0: 84, - 0x11CA1: 84, - 0x11CA2: 84, - 0x11CA3: 84, - 0x11CA4: 84, - 0x11CA5: 84, - 0x11CA6: 84, - 0x11CA7: 84, - 0x11CAA: 84, - 0x11CAB: 84, - 0x11CAC: 84, - 0x11CAD: 84, - 0x11CAE: 84, - 0x11CAF: 84, - 0x11CB0: 84, - 0x11CB2: 84, - 0x11CB3: 84, - 0x11CB5: 84, - 0x11CB6: 84, - 0x11D31: 84, - 0x11D32: 84, - 0x11D33: 84, - 0x11D34: 84, - 0x11D35: 84, - 0x11D36: 84, - 0x11D3A: 84, - 0x11D3C: 84, - 0x11D3D: 84, - 0x11D3F: 84, - 0x11D40: 84, - 0x11D41: 84, - 0x11D42: 84, - 0x11D43: 84, - 0x11D44: 84, - 
0x11D45: 84, - 0x11D47: 84, - 0x11D90: 84, - 0x11D91: 84, - 0x11D95: 84, - 0x11D97: 84, - 0x11EF3: 84, - 0x11EF4: 84, - 0x11F00: 84, - 0x11F01: 84, - 0x11F36: 84, - 0x11F37: 84, - 0x11F38: 84, - 0x11F39: 84, - 0x11F3A: 84, - 0x11F40: 84, - 0x11F42: 84, - 0x13430: 84, - 0x13431: 84, - 0x13432: 84, - 0x13433: 84, - 0x13434: 84, - 0x13435: 84, - 0x13436: 84, - 0x13437: 84, - 0x13438: 84, - 0x13439: 84, - 0x1343A: 84, - 0x1343B: 84, - 0x1343C: 84, - 0x1343D: 84, - 0x1343E: 84, - 0x1343F: 84, - 0x13440: 84, - 0x13447: 84, - 0x13448: 84, - 0x13449: 84, - 0x1344A: 84, - 0x1344B: 84, - 0x1344C: 84, - 0x1344D: 84, - 0x1344E: 84, - 0x1344F: 84, - 0x13450: 84, - 0x13451: 84, - 0x13452: 84, - 0x13453: 84, - 0x13454: 84, - 0x13455: 84, - 0x16AF0: 84, - 0x16AF1: 84, - 0x16AF2: 84, - 0x16AF3: 84, - 0x16AF4: 84, - 0x16B30: 84, - 0x16B31: 84, - 0x16B32: 84, - 0x16B33: 84, - 0x16B34: 84, - 0x16B35: 84, - 0x16B36: 84, - 0x16F4F: 84, - 0x16F8F: 84, - 0x16F90: 84, - 0x16F91: 84, - 0x16F92: 84, - 0x16FE4: 84, - 0x1BC9D: 84, - 0x1BC9E: 84, - 0x1BCA0: 84, - 0x1BCA1: 84, - 0x1BCA2: 84, - 0x1BCA3: 84, - 0x1CF00: 84, - 0x1CF01: 84, - 0x1CF02: 84, - 0x1CF03: 84, - 0x1CF04: 84, - 0x1CF05: 84, - 0x1CF06: 84, - 0x1CF07: 84, - 0x1CF08: 84, - 0x1CF09: 84, - 0x1CF0A: 84, - 0x1CF0B: 84, - 0x1CF0C: 84, - 0x1CF0D: 84, - 0x1CF0E: 84, - 0x1CF0F: 84, - 0x1CF10: 84, - 0x1CF11: 84, - 0x1CF12: 84, - 0x1CF13: 84, - 0x1CF14: 84, - 0x1CF15: 84, - 0x1CF16: 84, - 0x1CF17: 84, - 0x1CF18: 84, - 0x1CF19: 84, - 0x1CF1A: 84, - 0x1CF1B: 84, - 0x1CF1C: 84, - 0x1CF1D: 84, - 0x1CF1E: 84, - 0x1CF1F: 84, - 0x1CF20: 84, - 0x1CF21: 84, - 0x1CF22: 84, - 0x1CF23: 84, - 0x1CF24: 84, - 0x1CF25: 84, - 0x1CF26: 84, - 0x1CF27: 84, - 0x1CF28: 84, - 0x1CF29: 84, - 0x1CF2A: 84, - 0x1CF2B: 84, - 0x1CF2C: 84, - 0x1CF2D: 84, - 0x1CF30: 84, - 0x1CF31: 84, - 0x1CF32: 84, - 0x1CF33: 84, - 0x1CF34: 84, - 0x1CF35: 84, - 0x1CF36: 84, - 0x1CF37: 84, - 0x1CF38: 84, - 0x1CF39: 84, - 0x1CF3A: 84, - 0x1CF3B: 84, - 0x1CF3C: 84, - 0x1CF3D: 84, - 0x1CF3E: 84, - 0x1CF3F: 84, - 0x1CF40: 84, - 0x1CF41: 84, - 0x1CF42: 84, - 0x1CF43: 84, - 0x1CF44: 84, - 0x1CF45: 84, - 0x1CF46: 84, - 0x1D167: 84, - 0x1D168: 84, - 0x1D169: 84, - 0x1D173: 84, - 0x1D174: 84, - 0x1D175: 84, - 0x1D176: 84, - 0x1D177: 84, - 0x1D178: 84, - 0x1D179: 84, - 0x1D17A: 84, - 0x1D17B: 84, - 0x1D17C: 84, - 0x1D17D: 84, - 0x1D17E: 84, - 0x1D17F: 84, - 0x1D180: 84, - 0x1D181: 84, - 0x1D182: 84, - 0x1D185: 84, - 0x1D186: 84, - 0x1D187: 84, - 0x1D188: 84, - 0x1D189: 84, - 0x1D18A: 84, - 0x1D18B: 84, - 0x1D1AA: 84, - 0x1D1AB: 84, - 0x1D1AC: 84, - 0x1D1AD: 84, - 0x1D242: 84, - 0x1D243: 84, - 0x1D244: 84, - 0x1DA00: 84, - 0x1DA01: 84, - 0x1DA02: 84, - 0x1DA03: 84, - 0x1DA04: 84, - 0x1DA05: 84, - 0x1DA06: 84, - 0x1DA07: 84, - 0x1DA08: 84, - 0x1DA09: 84, - 0x1DA0A: 84, - 0x1DA0B: 84, - 0x1DA0C: 84, - 0x1DA0D: 84, - 0x1DA0E: 84, - 0x1DA0F: 84, - 0x1DA10: 84, - 0x1DA11: 84, - 0x1DA12: 84, - 0x1DA13: 84, - 0x1DA14: 84, - 0x1DA15: 84, - 0x1DA16: 84, - 0x1DA17: 84, - 0x1DA18: 84, - 0x1DA19: 84, - 0x1DA1A: 84, - 0x1DA1B: 84, - 0x1DA1C: 84, - 0x1DA1D: 84, - 0x1DA1E: 84, - 0x1DA1F: 84, - 0x1DA20: 84, - 0x1DA21: 84, - 0x1DA22: 84, - 0x1DA23: 84, - 0x1DA24: 84, - 0x1DA25: 84, - 0x1DA26: 84, - 0x1DA27: 84, - 0x1DA28: 84, - 0x1DA29: 84, - 0x1DA2A: 84, - 0x1DA2B: 84, - 0x1DA2C: 84, - 0x1DA2D: 84, - 0x1DA2E: 84, - 0x1DA2F: 84, - 0x1DA30: 84, - 0x1DA31: 84, - 0x1DA32: 84, - 0x1DA33: 84, - 0x1DA34: 84, - 0x1DA35: 84, - 0x1DA36: 84, - 0x1DA3B: 84, - 0x1DA3C: 84, - 0x1DA3D: 84, - 0x1DA3E: 84, - 0x1DA3F: 84, - 0x1DA40: 84, - 0x1DA41: 84, - 
0x1DA42: 84, - 0x1DA43: 84, - 0x1DA44: 84, - 0x1DA45: 84, - 0x1DA46: 84, - 0x1DA47: 84, - 0x1DA48: 84, - 0x1DA49: 84, - 0x1DA4A: 84, - 0x1DA4B: 84, - 0x1DA4C: 84, - 0x1DA4D: 84, - 0x1DA4E: 84, - 0x1DA4F: 84, - 0x1DA50: 84, - 0x1DA51: 84, - 0x1DA52: 84, - 0x1DA53: 84, - 0x1DA54: 84, - 0x1DA55: 84, - 0x1DA56: 84, - 0x1DA57: 84, - 0x1DA58: 84, - 0x1DA59: 84, - 0x1DA5A: 84, - 0x1DA5B: 84, - 0x1DA5C: 84, - 0x1DA5D: 84, - 0x1DA5E: 84, - 0x1DA5F: 84, - 0x1DA60: 84, - 0x1DA61: 84, - 0x1DA62: 84, - 0x1DA63: 84, - 0x1DA64: 84, - 0x1DA65: 84, - 0x1DA66: 84, - 0x1DA67: 84, - 0x1DA68: 84, - 0x1DA69: 84, - 0x1DA6A: 84, - 0x1DA6B: 84, - 0x1DA6C: 84, - 0x1DA75: 84, - 0x1DA84: 84, - 0x1DA9B: 84, - 0x1DA9C: 84, - 0x1DA9D: 84, - 0x1DA9E: 84, - 0x1DA9F: 84, - 0x1DAA1: 84, - 0x1DAA2: 84, - 0x1DAA3: 84, - 0x1DAA4: 84, - 0x1DAA5: 84, - 0x1DAA6: 84, - 0x1DAA7: 84, - 0x1DAA8: 84, - 0x1DAA9: 84, - 0x1DAAA: 84, - 0x1DAAB: 84, - 0x1DAAC: 84, - 0x1DAAD: 84, - 0x1DAAE: 84, - 0x1DAAF: 84, - 0x1E000: 84, - 0x1E001: 84, - 0x1E002: 84, - 0x1E003: 84, - 0x1E004: 84, - 0x1E005: 84, - 0x1E006: 84, - 0x1E008: 84, - 0x1E009: 84, - 0x1E00A: 84, - 0x1E00B: 84, - 0x1E00C: 84, - 0x1E00D: 84, - 0x1E00E: 84, - 0x1E00F: 84, - 0x1E010: 84, - 0x1E011: 84, - 0x1E012: 84, - 0x1E013: 84, - 0x1E014: 84, - 0x1E015: 84, - 0x1E016: 84, - 0x1E017: 84, - 0x1E018: 84, - 0x1E01B: 84, - 0x1E01C: 84, - 0x1E01D: 84, - 0x1E01E: 84, - 0x1E01F: 84, - 0x1E020: 84, - 0x1E021: 84, - 0x1E023: 84, - 0x1E024: 84, - 0x1E026: 84, - 0x1E027: 84, - 0x1E028: 84, - 0x1E029: 84, - 0x1E02A: 84, - 0x1E08F: 84, - 0x1E130: 84, - 0x1E131: 84, - 0x1E132: 84, - 0x1E133: 84, - 0x1E134: 84, - 0x1E135: 84, - 0x1E136: 84, - 0x1E2AE: 84, - 0x1E2EC: 84, - 0x1E2ED: 84, - 0x1E2EE: 84, - 0x1E2EF: 84, - 0x1E4EC: 84, - 0x1E4ED: 84, - 0x1E4EE: 84, - 0x1E4EF: 84, - 0x1E8D0: 84, - 0x1E8D1: 84, - 0x1E8D2: 84, - 0x1E8D3: 84, - 0x1E8D4: 84, - 0x1E8D5: 84, - 0x1E8D6: 84, - 0x1E900: 68, - 0x1E901: 68, - 0x1E902: 68, - 0x1E903: 68, - 0x1E904: 68, - 0x1E905: 68, - 0x1E906: 68, - 0x1E907: 68, - 0x1E908: 68, - 0x1E909: 68, - 0x1E90A: 68, - 0x1E90B: 68, - 0x1E90C: 68, - 0x1E90D: 68, - 0x1E90E: 68, - 0x1E90F: 68, - 0x1E910: 68, - 0x1E911: 68, - 0x1E912: 68, - 0x1E913: 68, - 0x1E914: 68, - 0x1E915: 68, - 0x1E916: 68, - 0x1E917: 68, - 0x1E918: 68, - 0x1E919: 68, - 0x1E91A: 68, - 0x1E91B: 68, - 0x1E91C: 68, - 0x1E91D: 68, - 0x1E91E: 68, - 0x1E91F: 68, - 0x1E920: 68, - 0x1E921: 68, - 0x1E922: 68, - 0x1E923: 68, - 0x1E924: 68, - 0x1E925: 68, - 0x1E926: 68, - 0x1E927: 68, - 0x1E928: 68, - 0x1E929: 68, - 0x1E92A: 68, - 0x1E92B: 68, - 0x1E92C: 68, - 0x1E92D: 68, - 0x1E92E: 68, - 0x1E92F: 68, - 0x1E930: 68, - 0x1E931: 68, - 0x1E932: 68, - 0x1E933: 68, - 0x1E934: 68, - 0x1E935: 68, - 0x1E936: 68, - 0x1E937: 68, - 0x1E938: 68, - 0x1E939: 68, - 0x1E93A: 68, - 0x1E93B: 68, - 0x1E93C: 68, - 0x1E93D: 68, - 0x1E93E: 68, - 0x1E93F: 68, - 0x1E940: 68, - 0x1E941: 68, - 0x1E942: 68, - 0x1E943: 68, - 0x1E944: 84, - 0x1E945: 84, - 0x1E946: 84, - 0x1E947: 84, - 0x1E948: 84, - 0x1E949: 84, - 0x1E94A: 84, - 0x1E94B: 84, - 0xE0001: 84, - 0xE0020: 84, - 0xE0021: 84, - 0xE0022: 84, - 0xE0023: 84, - 0xE0024: 84, - 0xE0025: 84, - 0xE0026: 84, - 0xE0027: 84, - 0xE0028: 84, - 0xE0029: 84, - 0xE002A: 84, - 0xE002B: 84, - 0xE002C: 84, - 0xE002D: 84, - 0xE002E: 84, - 0xE002F: 84, - 0xE0030: 84, - 0xE0031: 84, - 0xE0032: 84, - 0xE0033: 84, - 0xE0034: 84, - 0xE0035: 84, - 0xE0036: 84, - 0xE0037: 84, - 0xE0038: 84, - 0xE0039: 84, - 0xE003A: 84, - 0xE003B: 84, - 0xE003C: 84, - 0xE003D: 84, - 0xE003E: 84, - 0xE003F: 84, - 0xE0040: 84, - 
0xE0041: 84, - 0xE0042: 84, - 0xE0043: 84, - 0xE0044: 84, - 0xE0045: 84, - 0xE0046: 84, - 0xE0047: 84, - 0xE0048: 84, - 0xE0049: 84, - 0xE004A: 84, - 0xE004B: 84, - 0xE004C: 84, - 0xE004D: 84, - 0xE004E: 84, - 0xE004F: 84, - 0xE0050: 84, - 0xE0051: 84, - 0xE0052: 84, - 0xE0053: 84, - 0xE0054: 84, - 0xE0055: 84, - 0xE0056: 84, - 0xE0057: 84, - 0xE0058: 84, - 0xE0059: 84, - 0xE005A: 84, - 0xE005B: 84, - 0xE005C: 84, - 0xE005D: 84, - 0xE005E: 84, - 0xE005F: 84, - 0xE0060: 84, - 0xE0061: 84, - 0xE0062: 84, - 0xE0063: 84, - 0xE0064: 84, - 0xE0065: 84, - 0xE0066: 84, - 0xE0067: 84, - 0xE0068: 84, - 0xE0069: 84, - 0xE006A: 84, - 0xE006B: 84, - 0xE006C: 84, - 0xE006D: 84, - 0xE006E: 84, - 0xE006F: 84, - 0xE0070: 84, - 0xE0071: 84, - 0xE0072: 84, - 0xE0073: 84, - 0xE0074: 84, - 0xE0075: 84, - 0xE0076: 84, - 0xE0077: 84, - 0xE0078: 84, - 0xE0079: 84, - 0xE007A: 84, - 0xE007B: 84, - 0xE007C: 84, - 0xE007D: 84, - 0xE007E: 84, - 0xE007F: 84, - 0xE0100: 84, - 0xE0101: 84, - 0xE0102: 84, - 0xE0103: 84, - 0xE0104: 84, - 0xE0105: 84, - 0xE0106: 84, - 0xE0107: 84, - 0xE0108: 84, - 0xE0109: 84, - 0xE010A: 84, - 0xE010B: 84, - 0xE010C: 84, - 0xE010D: 84, - 0xE010E: 84, - 0xE010F: 84, - 0xE0110: 84, - 0xE0111: 84, - 0xE0112: 84, - 0xE0113: 84, - 0xE0114: 84, - 0xE0115: 84, - 0xE0116: 84, - 0xE0117: 84, - 0xE0118: 84, - 0xE0119: 84, - 0xE011A: 84, - 0xE011B: 84, - 0xE011C: 84, - 0xE011D: 84, - 0xE011E: 84, - 0xE011F: 84, - 0xE0120: 84, - 0xE0121: 84, - 0xE0122: 84, - 0xE0123: 84, - 0xE0124: 84, - 0xE0125: 84, - 0xE0126: 84, - 0xE0127: 84, - 0xE0128: 84, - 0xE0129: 84, - 0xE012A: 84, - 0xE012B: 84, - 0xE012C: 84, - 0xE012D: 84, - 0xE012E: 84, - 0xE012F: 84, - 0xE0130: 84, - 0xE0131: 84, - 0xE0132: 84, - 0xE0133: 84, - 0xE0134: 84, - 0xE0135: 84, - 0xE0136: 84, - 0xE0137: 84, - 0xE0138: 84, - 0xE0139: 84, - 0xE013A: 84, - 0xE013B: 84, - 0xE013C: 84, - 0xE013D: 84, - 0xE013E: 84, - 0xE013F: 84, - 0xE0140: 84, - 0xE0141: 84, - 0xE0142: 84, - 0xE0143: 84, - 0xE0144: 84, - 0xE0145: 84, - 0xE0146: 84, - 0xE0147: 84, - 0xE0148: 84, - 0xE0149: 84, - 0xE014A: 84, - 0xE014B: 84, - 0xE014C: 84, - 0xE014D: 84, - 0xE014E: 84, - 0xE014F: 84, - 0xE0150: 84, - 0xE0151: 84, - 0xE0152: 84, - 0xE0153: 84, - 0xE0154: 84, - 0xE0155: 84, - 0xE0156: 84, - 0xE0157: 84, - 0xE0158: 84, - 0xE0159: 84, - 0xE015A: 84, - 0xE015B: 84, - 0xE015C: 84, - 0xE015D: 84, - 0xE015E: 84, - 0xE015F: 84, - 0xE0160: 84, - 0xE0161: 84, - 0xE0162: 84, - 0xE0163: 84, - 0xE0164: 84, - 0xE0165: 84, - 0xE0166: 84, - 0xE0167: 84, - 0xE0168: 84, - 0xE0169: 84, - 0xE016A: 84, - 0xE016B: 84, - 0xE016C: 84, - 0xE016D: 84, - 0xE016E: 84, - 0xE016F: 84, - 0xE0170: 84, - 0xE0171: 84, - 0xE0172: 84, - 0xE0173: 84, - 0xE0174: 84, - 0xE0175: 84, - 0xE0176: 84, - 0xE0177: 84, - 0xE0178: 84, - 0xE0179: 84, - 0xE017A: 84, - 0xE017B: 84, - 0xE017C: 84, - 0xE017D: 84, - 0xE017E: 84, - 0xE017F: 84, - 0xE0180: 84, - 0xE0181: 84, - 0xE0182: 84, - 0xE0183: 84, - 0xE0184: 84, - 0xE0185: 84, - 0xE0186: 84, - 0xE0187: 84, - 0xE0188: 84, - 0xE0189: 84, - 0xE018A: 84, - 0xE018B: 84, - 0xE018C: 84, - 0xE018D: 84, - 0xE018E: 84, - 0xE018F: 84, - 0xE0190: 84, - 0xE0191: 84, - 0xE0192: 84, - 0xE0193: 84, - 0xE0194: 84, - 0xE0195: 84, - 0xE0196: 84, - 0xE0197: 84, - 0xE0198: 84, - 0xE0199: 84, - 0xE019A: 84, - 0xE019B: 84, - 0xE019C: 84, - 0xE019D: 84, - 0xE019E: 84, - 0xE019F: 84, - 0xE01A0: 84, - 0xE01A1: 84, - 0xE01A2: 84, - 0xE01A3: 84, - 0xE01A4: 84, - 0xE01A5: 84, - 0xE01A6: 84, - 0xE01A7: 84, - 0xE01A8: 84, - 0xE01A9: 84, - 0xE01AA: 84, - 0xE01AB: 84, - 0xE01AC: 84, - 0xE01AD: 84, - 
0xE01AE: 84, - 0xE01AF: 84, - 0xE01B0: 84, - 0xE01B1: 84, - 0xE01B2: 84, - 0xE01B3: 84, - 0xE01B4: 84, - 0xE01B5: 84, - 0xE01B6: 84, - 0xE01B7: 84, - 0xE01B8: 84, - 0xE01B9: 84, - 0xE01BA: 84, - 0xE01BB: 84, - 0xE01BC: 84, - 0xE01BD: 84, - 0xE01BE: 84, - 0xE01BF: 84, - 0xE01C0: 84, - 0xE01C1: 84, - 0xE01C2: 84, - 0xE01C3: 84, - 0xE01C4: 84, - 0xE01C5: 84, - 0xE01C6: 84, - 0xE01C7: 84, - 0xE01C8: 84, - 0xE01C9: 84, - 0xE01CA: 84, - 0xE01CB: 84, - 0xE01CC: 84, - 0xE01CD: 84, - 0xE01CE: 84, - 0xE01CF: 84, - 0xE01D0: 84, - 0xE01D1: 84, - 0xE01D2: 84, - 0xE01D3: 84, - 0xE01D4: 84, - 0xE01D5: 84, - 0xE01D6: 84, - 0xE01D7: 84, - 0xE01D8: 84, - 0xE01D9: 84, - 0xE01DA: 84, - 0xE01DB: 84, - 0xE01DC: 84, - 0xE01DD: 84, - 0xE01DE: 84, - 0xE01DF: 84, - 0xE01E0: 84, - 0xE01E1: 84, - 0xE01E2: 84, - 0xE01E3: 84, - 0xE01E4: 84, - 0xE01E5: 84, - 0xE01E6: 84, - 0xE01E7: 84, - 0xE01E8: 84, - 0xE01E9: 84, - 0xE01EA: 84, - 0xE01EB: 84, - 0xE01EC: 84, - 0xE01ED: 84, - 0xE01EE: 84, - 0xE01EF: 84, -} -codepoint_classes = { - "PVALID": ( - 0x2D0000002E, - 0x300000003A, - 0x610000007B, - 0xDF000000F7, - 0xF800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010A, - 0x10B0000010C, - 0x10D0000010E, - 0x10F00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011A, - 0x11B0000011C, - 0x11D0000011E, - 0x11F00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012A, - 0x12B0000012C, - 0x12D0000012E, - 0x12F00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13A0000013B, - 0x13C0000013D, - 0x13E0000013F, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14B0000014C, - 0x14D0000014E, - 0x14F00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015A, - 0x15B0000015C, - 0x15D0000015E, - 0x15F00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016A, - 0x16B0000016C, - 0x16D0000016E, - 0x16F00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17A0000017B, - 0x17C0000017D, - 0x17E0000017F, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18C0000018E, - 0x19200000193, - 0x19500000196, - 0x1990000019C, - 0x19E0000019F, - 0x1A1000001A2, - 0x1A3000001A4, - 0x1A5000001A6, - 0x1A8000001A9, - 0x1AA000001AC, - 0x1AD000001AE, - 0x1B0000001B1, - 0x1B4000001B5, - 0x1B6000001B7, - 0x1B9000001BC, - 0x1BD000001C4, - 0x1CE000001CF, - 0x1D0000001D1, - 0x1D2000001D3, - 0x1D4000001D5, - 0x1D6000001D7, - 0x1D8000001D9, - 0x1DA000001DB, - 0x1DC000001DE, - 0x1DF000001E0, - 0x1E1000001E2, - 0x1E3000001E4, - 0x1E5000001E6, - 0x1E7000001E8, - 0x1E9000001EA, - 0x1EB000001EC, - 0x1ED000001EE, - 0x1EF000001F1, - 0x1F5000001F6, - 0x1F9000001FA, - 0x1FB000001FC, - 0x1FD000001FE, - 0x1FF00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020A, - 0x20B0000020C, - 0x20D0000020E, - 0x20F00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021A, - 0x21B0000021C, - 0x21D0000021E, - 0x21F00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022A, - 0x22B0000022C, - 0x22D0000022E, - 0x22F00000230, - 0x23100000232, - 0x2330000023A, - 0x23C0000023D, - 0x23F00000241, - 0x24200000243, - 0x24700000248, - 0x2490000024A, - 0x24B0000024C, - 0x24D0000024E, - 0x24F000002B0, - 0x2B9000002C2, - 0x2C6000002D2, - 0x2EC000002ED, - 0x2EE000002EF, - 0x30000000340, - 0x34200000343, - 0x3460000034F, - 
0x35000000370, - 0x37100000372, - 0x37300000374, - 0x37700000378, - 0x37B0000037E, - 0x39000000391, - 0x3AC000003CF, - 0x3D7000003D8, - 0x3D9000003DA, - 0x3DB000003DC, - 0x3DD000003DE, - 0x3DF000003E0, - 0x3E1000003E2, - 0x3E3000003E4, - 0x3E5000003E6, - 0x3E7000003E8, - 0x3E9000003EA, - 0x3EB000003EC, - 0x3ED000003EE, - 0x3EF000003F0, - 0x3F3000003F4, - 0x3F8000003F9, - 0x3FB000003FD, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046A, - 0x46B0000046C, - 0x46D0000046E, - 0x46F00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047A, - 0x47B0000047C, - 0x47D0000047E, - 0x47F00000480, - 0x48100000482, - 0x48300000488, - 0x48B0000048C, - 0x48D0000048E, - 0x48F00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049A, - 0x49B0000049C, - 0x49D0000049E, - 0x49F000004A0, - 0x4A1000004A2, - 0x4A3000004A4, - 0x4A5000004A6, - 0x4A7000004A8, - 0x4A9000004AA, - 0x4AB000004AC, - 0x4AD000004AE, - 0x4AF000004B0, - 0x4B1000004B2, - 0x4B3000004B4, - 0x4B5000004B6, - 0x4B7000004B8, - 0x4B9000004BA, - 0x4BB000004BC, - 0x4BD000004BE, - 0x4BF000004C0, - 0x4C2000004C3, - 0x4C4000004C5, - 0x4C6000004C7, - 0x4C8000004C9, - 0x4CA000004CB, - 0x4CC000004CD, - 0x4CE000004D0, - 0x4D1000004D2, - 0x4D3000004D4, - 0x4D5000004D6, - 0x4D7000004D8, - 0x4D9000004DA, - 0x4DB000004DC, - 0x4DD000004DE, - 0x4DF000004E0, - 0x4E1000004E2, - 0x4E3000004E4, - 0x4E5000004E6, - 0x4E7000004E8, - 0x4E9000004EA, - 0x4EB000004EC, - 0x4ED000004EE, - 0x4EF000004F0, - 0x4F1000004F2, - 0x4F3000004F4, - 0x4F5000004F6, - 0x4F7000004F8, - 0x4F9000004FA, - 0x4FB000004FC, - 0x4FD000004FE, - 0x4FF00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050A, - 0x50B0000050C, - 0x50D0000050E, - 0x50F00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051A, - 0x51B0000051C, - 0x51D0000051E, - 0x51F00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5290000052A, - 0x52B0000052C, - 0x52D0000052E, - 0x52F00000530, - 0x5590000055A, - 0x56000000587, - 0x58800000589, - 0x591000005BE, - 0x5BF000005C0, - 0x5C1000005C3, - 0x5C4000005C6, - 0x5C7000005C8, - 0x5D0000005EB, - 0x5EF000005F3, - 0x6100000061B, - 0x62000000640, - 0x64100000660, - 0x66E00000675, - 0x679000006D4, - 0x6D5000006DD, - 0x6DF000006E9, - 0x6EA000006F0, - 0x6FA00000700, - 0x7100000074B, - 0x74D000007B2, - 0x7C0000007F6, - 0x7FD000007FE, - 0x8000000082E, - 0x8400000085C, - 0x8600000086B, - 0x87000000888, - 0x8890000088F, - 0x898000008E2, - 0x8E300000958, - 0x96000000964, - 0x96600000970, - 0x97100000984, - 0x9850000098D, - 0x98F00000991, - 0x993000009A9, - 0x9AA000009B1, - 0x9B2000009B3, - 0x9B6000009BA, - 0x9BC000009C5, - 0x9C7000009C9, - 0x9CB000009CF, - 0x9D7000009D8, - 0x9E0000009E4, - 0x9E6000009F2, - 0x9FC000009FD, - 0x9FE000009FF, - 0xA0100000A04, - 0xA0500000A0B, - 0xA0F00000A11, - 0xA1300000A29, - 0xA2A00000A31, - 0xA3200000A33, - 0xA3500000A36, - 0xA3800000A3A, - 0xA3C00000A3D, - 0xA3E00000A43, - 0xA4700000A49, - 0xA4B00000A4E, - 0xA5100000A52, - 0xA5C00000A5D, - 0xA6600000A76, - 0xA8100000A84, - 0xA8500000A8E, - 0xA8F00000A92, - 0xA9300000AA9, - 0xAAA00000AB1, - 0xAB200000AB4, - 0xAB500000ABA, - 0xABC00000AC6, - 0xAC700000ACA, - 0xACB00000ACE, - 0xAD000000AD1, - 0xAE000000AE4, - 0xAE600000AF0, - 0xAF900000B00, - 0xB0100000B04, - 0xB0500000B0D, - 0xB0F00000B11, - 0xB1300000B29, - 0xB2A00000B31, - 0xB3200000B34, - 0xB3500000B3A, - 0xB3C00000B45, - 0xB4700000B49, - 
0xB4B00000B4E, - 0xB5500000B58, - 0xB5F00000B64, - 0xB6600000B70, - 0xB7100000B72, - 0xB8200000B84, - 0xB8500000B8B, - 0xB8E00000B91, - 0xB9200000B96, - 0xB9900000B9B, - 0xB9C00000B9D, - 0xB9E00000BA0, - 0xBA300000BA5, - 0xBA800000BAB, - 0xBAE00000BBA, - 0xBBE00000BC3, - 0xBC600000BC9, - 0xBCA00000BCE, - 0xBD000000BD1, - 0xBD700000BD8, - 0xBE600000BF0, - 0xC0000000C0D, - 0xC0E00000C11, - 0xC1200000C29, - 0xC2A00000C3A, - 0xC3C00000C45, - 0xC4600000C49, - 0xC4A00000C4E, - 0xC5500000C57, - 0xC5800000C5B, - 0xC5D00000C5E, - 0xC6000000C64, - 0xC6600000C70, - 0xC8000000C84, - 0xC8500000C8D, - 0xC8E00000C91, - 0xC9200000CA9, - 0xCAA00000CB4, - 0xCB500000CBA, - 0xCBC00000CC5, - 0xCC600000CC9, - 0xCCA00000CCE, - 0xCD500000CD7, - 0xCDD00000CDF, - 0xCE000000CE4, - 0xCE600000CF0, - 0xCF100000CF4, - 0xD0000000D0D, - 0xD0E00000D11, - 0xD1200000D45, - 0xD4600000D49, - 0xD4A00000D4F, - 0xD5400000D58, - 0xD5F00000D64, - 0xD6600000D70, - 0xD7A00000D80, - 0xD8100000D84, - 0xD8500000D97, - 0xD9A00000DB2, - 0xDB300000DBC, - 0xDBD00000DBE, - 0xDC000000DC7, - 0xDCA00000DCB, - 0xDCF00000DD5, - 0xDD600000DD7, - 0xDD800000DE0, - 0xDE600000DF0, - 0xDF200000DF4, - 0xE0100000E33, - 0xE3400000E3B, - 0xE4000000E4F, - 0xE5000000E5A, - 0xE8100000E83, - 0xE8400000E85, - 0xE8600000E8B, - 0xE8C00000EA4, - 0xEA500000EA6, - 0xEA700000EB3, - 0xEB400000EBE, - 0xEC000000EC5, - 0xEC600000EC7, - 0xEC800000ECF, - 0xED000000EDA, - 0xEDE00000EE0, - 0xF0000000F01, - 0xF0B00000F0C, - 0xF1800000F1A, - 0xF2000000F2A, - 0xF3500000F36, - 0xF3700000F38, - 0xF3900000F3A, - 0xF3E00000F43, - 0xF4400000F48, - 0xF4900000F4D, - 0xF4E00000F52, - 0xF5300000F57, - 0xF5800000F5C, - 0xF5D00000F69, - 0xF6A00000F6D, - 0xF7100000F73, - 0xF7400000F75, - 0xF7A00000F81, - 0xF8200000F85, - 0xF8600000F93, - 0xF9400000F98, - 0xF9900000F9D, - 0xF9E00000FA2, - 0xFA300000FA7, - 0xFA800000FAC, - 0xFAD00000FB9, - 0xFBA00000FBD, - 0xFC600000FC7, - 0x10000000104A, - 0x10500000109E, - 0x10D0000010FB, - 0x10FD00001100, - 0x120000001249, - 0x124A0000124E, - 0x125000001257, - 0x125800001259, - 0x125A0000125E, - 0x126000001289, - 0x128A0000128E, - 0x1290000012B1, - 0x12B2000012B6, - 0x12B8000012BF, - 0x12C0000012C1, - 0x12C2000012C6, - 0x12C8000012D7, - 0x12D800001311, - 0x131200001316, - 0x13180000135B, - 0x135D00001360, - 0x138000001390, - 0x13A0000013F6, - 0x14010000166D, - 0x166F00001680, - 0x16810000169B, - 0x16A0000016EB, - 0x16F1000016F9, - 0x170000001716, - 0x171F00001735, - 0x174000001754, - 0x17600000176D, - 0x176E00001771, - 0x177200001774, - 0x1780000017B4, - 0x17B6000017D4, - 0x17D7000017D8, - 0x17DC000017DE, - 0x17E0000017EA, - 0x18100000181A, - 0x182000001879, - 0x1880000018AB, - 0x18B0000018F6, - 0x19000000191F, - 0x19200000192C, - 0x19300000193C, - 0x19460000196E, - 0x197000001975, - 0x1980000019AC, - 0x19B0000019CA, - 0x19D0000019DA, - 0x1A0000001A1C, - 0x1A2000001A5F, - 0x1A6000001A7D, - 0x1A7F00001A8A, - 0x1A9000001A9A, - 0x1AA700001AA8, - 0x1AB000001ABE, - 0x1ABF00001ACF, - 0x1B0000001B4D, - 0x1B5000001B5A, - 0x1B6B00001B74, - 0x1B8000001BF4, - 0x1C0000001C38, - 0x1C4000001C4A, - 0x1C4D00001C7E, - 0x1CD000001CD3, - 0x1CD400001CFB, - 0x1D0000001D2C, - 0x1D2F00001D30, - 0x1D3B00001D3C, - 0x1D4E00001D4F, - 0x1D6B00001D78, - 0x1D7900001D9B, - 0x1DC000001E00, - 0x1E0100001E02, - 0x1E0300001E04, - 0x1E0500001E06, - 0x1E0700001E08, - 0x1E0900001E0A, - 0x1E0B00001E0C, - 0x1E0D00001E0E, - 0x1E0F00001E10, - 0x1E1100001E12, - 0x1E1300001E14, - 0x1E1500001E16, - 0x1E1700001E18, - 0x1E1900001E1A, - 0x1E1B00001E1C, - 0x1E1D00001E1E, - 0x1E1F00001E20, - 
0x1E2100001E22, - 0x1E2300001E24, - 0x1E2500001E26, - 0x1E2700001E28, - 0x1E2900001E2A, - 0x1E2B00001E2C, - 0x1E2D00001E2E, - 0x1E2F00001E30, - 0x1E3100001E32, - 0x1E3300001E34, - 0x1E3500001E36, - 0x1E3700001E38, - 0x1E3900001E3A, - 0x1E3B00001E3C, - 0x1E3D00001E3E, - 0x1E3F00001E40, - 0x1E4100001E42, - 0x1E4300001E44, - 0x1E4500001E46, - 0x1E4700001E48, - 0x1E4900001E4A, - 0x1E4B00001E4C, - 0x1E4D00001E4E, - 0x1E4F00001E50, - 0x1E5100001E52, - 0x1E5300001E54, - 0x1E5500001E56, - 0x1E5700001E58, - 0x1E5900001E5A, - 0x1E5B00001E5C, - 0x1E5D00001E5E, - 0x1E5F00001E60, - 0x1E6100001E62, - 0x1E6300001E64, - 0x1E6500001E66, - 0x1E6700001E68, - 0x1E6900001E6A, - 0x1E6B00001E6C, - 0x1E6D00001E6E, - 0x1E6F00001E70, - 0x1E7100001E72, - 0x1E7300001E74, - 0x1E7500001E76, - 0x1E7700001E78, - 0x1E7900001E7A, - 0x1E7B00001E7C, - 0x1E7D00001E7E, - 0x1E7F00001E80, - 0x1E8100001E82, - 0x1E8300001E84, - 0x1E8500001E86, - 0x1E8700001E88, - 0x1E8900001E8A, - 0x1E8B00001E8C, - 0x1E8D00001E8E, - 0x1E8F00001E90, - 0x1E9100001E92, - 0x1E9300001E94, - 0x1E9500001E9A, - 0x1E9C00001E9E, - 0x1E9F00001EA0, - 0x1EA100001EA2, - 0x1EA300001EA4, - 0x1EA500001EA6, - 0x1EA700001EA8, - 0x1EA900001EAA, - 0x1EAB00001EAC, - 0x1EAD00001EAE, - 0x1EAF00001EB0, - 0x1EB100001EB2, - 0x1EB300001EB4, - 0x1EB500001EB6, - 0x1EB700001EB8, - 0x1EB900001EBA, - 0x1EBB00001EBC, - 0x1EBD00001EBE, - 0x1EBF00001EC0, - 0x1EC100001EC2, - 0x1EC300001EC4, - 0x1EC500001EC6, - 0x1EC700001EC8, - 0x1EC900001ECA, - 0x1ECB00001ECC, - 0x1ECD00001ECE, - 0x1ECF00001ED0, - 0x1ED100001ED2, - 0x1ED300001ED4, - 0x1ED500001ED6, - 0x1ED700001ED8, - 0x1ED900001EDA, - 0x1EDB00001EDC, - 0x1EDD00001EDE, - 0x1EDF00001EE0, - 0x1EE100001EE2, - 0x1EE300001EE4, - 0x1EE500001EE6, - 0x1EE700001EE8, - 0x1EE900001EEA, - 0x1EEB00001EEC, - 0x1EED00001EEE, - 0x1EEF00001EF0, - 0x1EF100001EF2, - 0x1EF300001EF4, - 0x1EF500001EF6, - 0x1EF700001EF8, - 0x1EF900001EFA, - 0x1EFB00001EFC, - 0x1EFD00001EFE, - 0x1EFF00001F08, - 0x1F1000001F16, - 0x1F2000001F28, - 0x1F3000001F38, - 0x1F4000001F46, - 0x1F5000001F58, - 0x1F6000001F68, - 0x1F7000001F71, - 0x1F7200001F73, - 0x1F7400001F75, - 0x1F7600001F77, - 0x1F7800001F79, - 0x1F7A00001F7B, - 0x1F7C00001F7D, - 0x1FB000001FB2, - 0x1FB600001FB7, - 0x1FC600001FC7, - 0x1FD000001FD3, - 0x1FD600001FD8, - 0x1FE000001FE3, - 0x1FE400001FE8, - 0x1FF600001FF7, - 0x214E0000214F, - 0x218400002185, - 0x2C3000002C60, - 0x2C6100002C62, - 0x2C6500002C67, - 0x2C6800002C69, - 0x2C6A00002C6B, - 0x2C6C00002C6D, - 0x2C7100002C72, - 0x2C7300002C75, - 0x2C7600002C7C, - 0x2C8100002C82, - 0x2C8300002C84, - 0x2C8500002C86, - 0x2C8700002C88, - 0x2C8900002C8A, - 0x2C8B00002C8C, - 0x2C8D00002C8E, - 0x2C8F00002C90, - 0x2C9100002C92, - 0x2C9300002C94, - 0x2C9500002C96, - 0x2C9700002C98, - 0x2C9900002C9A, - 0x2C9B00002C9C, - 0x2C9D00002C9E, - 0x2C9F00002CA0, - 0x2CA100002CA2, - 0x2CA300002CA4, - 0x2CA500002CA6, - 0x2CA700002CA8, - 0x2CA900002CAA, - 0x2CAB00002CAC, - 0x2CAD00002CAE, - 0x2CAF00002CB0, - 0x2CB100002CB2, - 0x2CB300002CB4, - 0x2CB500002CB6, - 0x2CB700002CB8, - 0x2CB900002CBA, - 0x2CBB00002CBC, - 0x2CBD00002CBE, - 0x2CBF00002CC0, - 0x2CC100002CC2, - 0x2CC300002CC4, - 0x2CC500002CC6, - 0x2CC700002CC8, - 0x2CC900002CCA, - 0x2CCB00002CCC, - 0x2CCD00002CCE, - 0x2CCF00002CD0, - 0x2CD100002CD2, - 0x2CD300002CD4, - 0x2CD500002CD6, - 0x2CD700002CD8, - 0x2CD900002CDA, - 0x2CDB00002CDC, - 0x2CDD00002CDE, - 0x2CDF00002CE0, - 0x2CE100002CE2, - 0x2CE300002CE5, - 0x2CEC00002CED, - 0x2CEE00002CF2, - 0x2CF300002CF4, - 0x2D0000002D26, - 0x2D2700002D28, - 0x2D2D00002D2E, - 
0x2D3000002D68, - 0x2D7F00002D97, - 0x2DA000002DA7, - 0x2DA800002DAF, - 0x2DB000002DB7, - 0x2DB800002DBF, - 0x2DC000002DC7, - 0x2DC800002DCF, - 0x2DD000002DD7, - 0x2DD800002DDF, - 0x2DE000002E00, - 0x2E2F00002E30, - 0x300500003008, - 0x302A0000302E, - 0x303C0000303D, - 0x304100003097, - 0x30990000309B, - 0x309D0000309F, - 0x30A1000030FB, - 0x30FC000030FF, - 0x310500003130, - 0x31A0000031C0, - 0x31F000003200, - 0x340000004DC0, - 0x4E000000A48D, - 0xA4D00000A4FE, - 0xA5000000A60D, - 0xA6100000A62C, - 0xA6410000A642, - 0xA6430000A644, - 0xA6450000A646, - 0xA6470000A648, - 0xA6490000A64A, - 0xA64B0000A64C, - 0xA64D0000A64E, - 0xA64F0000A650, - 0xA6510000A652, - 0xA6530000A654, - 0xA6550000A656, - 0xA6570000A658, - 0xA6590000A65A, - 0xA65B0000A65C, - 0xA65D0000A65E, - 0xA65F0000A660, - 0xA6610000A662, - 0xA6630000A664, - 0xA6650000A666, - 0xA6670000A668, - 0xA6690000A66A, - 0xA66B0000A66C, - 0xA66D0000A670, - 0xA6740000A67E, - 0xA67F0000A680, - 0xA6810000A682, - 0xA6830000A684, - 0xA6850000A686, - 0xA6870000A688, - 0xA6890000A68A, - 0xA68B0000A68C, - 0xA68D0000A68E, - 0xA68F0000A690, - 0xA6910000A692, - 0xA6930000A694, - 0xA6950000A696, - 0xA6970000A698, - 0xA6990000A69A, - 0xA69B0000A69C, - 0xA69E0000A6E6, - 0xA6F00000A6F2, - 0xA7170000A720, - 0xA7230000A724, - 0xA7250000A726, - 0xA7270000A728, - 0xA7290000A72A, - 0xA72B0000A72C, - 0xA72D0000A72E, - 0xA72F0000A732, - 0xA7330000A734, - 0xA7350000A736, - 0xA7370000A738, - 0xA7390000A73A, - 0xA73B0000A73C, - 0xA73D0000A73E, - 0xA73F0000A740, - 0xA7410000A742, - 0xA7430000A744, - 0xA7450000A746, - 0xA7470000A748, - 0xA7490000A74A, - 0xA74B0000A74C, - 0xA74D0000A74E, - 0xA74F0000A750, - 0xA7510000A752, - 0xA7530000A754, - 0xA7550000A756, - 0xA7570000A758, - 0xA7590000A75A, - 0xA75B0000A75C, - 0xA75D0000A75E, - 0xA75F0000A760, - 0xA7610000A762, - 0xA7630000A764, - 0xA7650000A766, - 0xA7670000A768, - 0xA7690000A76A, - 0xA76B0000A76C, - 0xA76D0000A76E, - 0xA76F0000A770, - 0xA7710000A779, - 0xA77A0000A77B, - 0xA77C0000A77D, - 0xA77F0000A780, - 0xA7810000A782, - 0xA7830000A784, - 0xA7850000A786, - 0xA7870000A789, - 0xA78C0000A78D, - 0xA78E0000A790, - 0xA7910000A792, - 0xA7930000A796, - 0xA7970000A798, - 0xA7990000A79A, - 0xA79B0000A79C, - 0xA79D0000A79E, - 0xA79F0000A7A0, - 0xA7A10000A7A2, - 0xA7A30000A7A4, - 0xA7A50000A7A6, - 0xA7A70000A7A8, - 0xA7A90000A7AA, - 0xA7AF0000A7B0, - 0xA7B50000A7B6, - 0xA7B70000A7B8, - 0xA7B90000A7BA, - 0xA7BB0000A7BC, - 0xA7BD0000A7BE, - 0xA7BF0000A7C0, - 0xA7C10000A7C2, - 0xA7C30000A7C4, - 0xA7C80000A7C9, - 0xA7CA0000A7CB, - 0xA7D10000A7D2, - 0xA7D30000A7D4, - 0xA7D50000A7D6, - 0xA7D70000A7D8, - 0xA7D90000A7DA, - 0xA7F60000A7F8, - 0xA7FA0000A828, - 0xA82C0000A82D, - 0xA8400000A874, - 0xA8800000A8C6, - 0xA8D00000A8DA, - 0xA8E00000A8F8, - 0xA8FB0000A8FC, - 0xA8FD0000A92E, - 0xA9300000A954, - 0xA9800000A9C1, - 0xA9CF0000A9DA, - 0xA9E00000A9FF, - 0xAA000000AA37, - 0xAA400000AA4E, - 0xAA500000AA5A, - 0xAA600000AA77, - 0xAA7A0000AAC3, - 0xAADB0000AADE, - 0xAAE00000AAF0, - 0xAAF20000AAF7, - 0xAB010000AB07, - 0xAB090000AB0F, - 0xAB110000AB17, - 0xAB200000AB27, - 0xAB280000AB2F, - 0xAB300000AB5B, - 0xAB600000AB69, - 0xABC00000ABEB, - 0xABEC0000ABEE, - 0xABF00000ABFA, - 0xAC000000D7A4, - 0xFA0E0000FA10, - 0xFA110000FA12, - 0xFA130000FA15, - 0xFA1F0000FA20, - 0xFA210000FA22, - 0xFA230000FA25, - 0xFA270000FA2A, - 0xFB1E0000FB1F, - 0xFE200000FE30, - 0xFE730000FE74, - 0x100000001000C, - 0x1000D00010027, - 0x100280001003B, - 0x1003C0001003E, - 0x1003F0001004E, - 0x100500001005E, - 0x10080000100FB, - 0x101FD000101FE, - 0x102800001029D, - 
0x102A0000102D1, - 0x102E0000102E1, - 0x1030000010320, - 0x1032D00010341, - 0x103420001034A, - 0x103500001037B, - 0x103800001039E, - 0x103A0000103C4, - 0x103C8000103D0, - 0x104280001049E, - 0x104A0000104AA, - 0x104D8000104FC, - 0x1050000010528, - 0x1053000010564, - 0x10597000105A2, - 0x105A3000105B2, - 0x105B3000105BA, - 0x105BB000105BD, - 0x1060000010737, - 0x1074000010756, - 0x1076000010768, - 0x1078000010781, - 0x1080000010806, - 0x1080800010809, - 0x1080A00010836, - 0x1083700010839, - 0x1083C0001083D, - 0x1083F00010856, - 0x1086000010877, - 0x108800001089F, - 0x108E0000108F3, - 0x108F4000108F6, - 0x1090000010916, - 0x109200001093A, - 0x10980000109B8, - 0x109BE000109C0, - 0x10A0000010A04, - 0x10A0500010A07, - 0x10A0C00010A14, - 0x10A1500010A18, - 0x10A1900010A36, - 0x10A3800010A3B, - 0x10A3F00010A40, - 0x10A6000010A7D, - 0x10A8000010A9D, - 0x10AC000010AC8, - 0x10AC900010AE7, - 0x10B0000010B36, - 0x10B4000010B56, - 0x10B6000010B73, - 0x10B8000010B92, - 0x10C0000010C49, - 0x10CC000010CF3, - 0x10D0000010D28, - 0x10D3000010D3A, - 0x10E8000010EAA, - 0x10EAB00010EAD, - 0x10EB000010EB2, - 0x10EFD00010F1D, - 0x10F2700010F28, - 0x10F3000010F51, - 0x10F7000010F86, - 0x10FB000010FC5, - 0x10FE000010FF7, - 0x1100000011047, - 0x1106600011076, - 0x1107F000110BB, - 0x110C2000110C3, - 0x110D0000110E9, - 0x110F0000110FA, - 0x1110000011135, - 0x1113600011140, - 0x1114400011148, - 0x1115000011174, - 0x1117600011177, - 0x11180000111C5, - 0x111C9000111CD, - 0x111CE000111DB, - 0x111DC000111DD, - 0x1120000011212, - 0x1121300011238, - 0x1123E00011242, - 0x1128000011287, - 0x1128800011289, - 0x1128A0001128E, - 0x1128F0001129E, - 0x1129F000112A9, - 0x112B0000112EB, - 0x112F0000112FA, - 0x1130000011304, - 0x113050001130D, - 0x1130F00011311, - 0x1131300011329, - 0x1132A00011331, - 0x1133200011334, - 0x113350001133A, - 0x1133B00011345, - 0x1134700011349, - 0x1134B0001134E, - 0x1135000011351, - 0x1135700011358, - 0x1135D00011364, - 0x113660001136D, - 0x1137000011375, - 0x114000001144B, - 0x114500001145A, - 0x1145E00011462, - 0x11480000114C6, - 0x114C7000114C8, - 0x114D0000114DA, - 0x11580000115B6, - 0x115B8000115C1, - 0x115D8000115DE, - 0x1160000011641, - 0x1164400011645, - 0x116500001165A, - 0x11680000116B9, - 0x116C0000116CA, - 0x117000001171B, - 0x1171D0001172C, - 0x117300001173A, - 0x1174000011747, - 0x118000001183B, - 0x118C0000118EA, - 0x118FF00011907, - 0x119090001190A, - 0x1190C00011914, - 0x1191500011917, - 0x1191800011936, - 0x1193700011939, - 0x1193B00011944, - 0x119500001195A, - 0x119A0000119A8, - 0x119AA000119D8, - 0x119DA000119E2, - 0x119E3000119E5, - 0x11A0000011A3F, - 0x11A4700011A48, - 0x11A5000011A9A, - 0x11A9D00011A9E, - 0x11AB000011AF9, - 0x11C0000011C09, - 0x11C0A00011C37, - 0x11C3800011C41, - 0x11C5000011C5A, - 0x11C7200011C90, - 0x11C9200011CA8, - 0x11CA900011CB7, - 0x11D0000011D07, - 0x11D0800011D0A, - 0x11D0B00011D37, - 0x11D3A00011D3B, - 0x11D3C00011D3E, - 0x11D3F00011D48, - 0x11D5000011D5A, - 0x11D6000011D66, - 0x11D6700011D69, - 0x11D6A00011D8F, - 0x11D9000011D92, - 0x11D9300011D99, - 0x11DA000011DAA, - 0x11EE000011EF7, - 0x11F0000011F11, - 0x11F1200011F3B, - 0x11F3E00011F43, - 0x11F5000011F5A, - 0x11FB000011FB1, - 0x120000001239A, - 0x1248000012544, - 0x12F9000012FF1, - 0x1300000013430, - 0x1344000013456, - 0x1440000014647, - 0x1680000016A39, - 0x16A4000016A5F, - 0x16A6000016A6A, - 0x16A7000016ABF, - 0x16AC000016ACA, - 0x16AD000016AEE, - 0x16AF000016AF5, - 0x16B0000016B37, - 0x16B4000016B44, - 0x16B5000016B5A, - 0x16B6300016B78, - 0x16B7D00016B90, - 0x16E6000016E80, - 0x16F0000016F4B, - 
0x16F4F00016F88, - 0x16F8F00016FA0, - 0x16FE000016FE2, - 0x16FE300016FE5, - 0x16FF000016FF2, - 0x17000000187F8, - 0x1880000018CD6, - 0x18D0000018D09, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B123, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1B1550001B156, - 0x1B1640001B168, - 0x1B1700001B2FC, - 0x1BC000001BC6B, - 0x1BC700001BC7D, - 0x1BC800001BC89, - 0x1BC900001BC9A, - 0x1BC9D0001BC9F, - 0x1CF000001CF2E, - 0x1CF300001CF47, - 0x1DA000001DA37, - 0x1DA3B0001DA6D, - 0x1DA750001DA76, - 0x1DA840001DA85, - 0x1DA9B0001DAA0, - 0x1DAA10001DAB0, - 0x1DF000001DF1F, - 0x1DF250001DF2B, - 0x1E0000001E007, - 0x1E0080001E019, - 0x1E01B0001E022, - 0x1E0230001E025, - 0x1E0260001E02B, - 0x1E08F0001E090, - 0x1E1000001E12D, - 0x1E1300001E13E, - 0x1E1400001E14A, - 0x1E14E0001E14F, - 0x1E2900001E2AF, - 0x1E2C00001E2FA, - 0x1E4D00001E4FA, - 0x1E7E00001E7E7, - 0x1E7E80001E7EC, - 0x1E7ED0001E7EF, - 0x1E7F00001E7FF, - 0x1E8000001E8C5, - 0x1E8D00001E8D7, - 0x1E9220001E94C, - 0x1E9500001E95A, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x300000003134B, - 0x31350000323B0,
-    ),
-    "CONTEXTJ": (0x200C0000200E,),
-    "CONTEXTO": (
-        0xB7000000B8,
-        0x37500000376,
-        0x5F3000005F5,
-        0x6600000066A,
-        0x6F0000006FA,
-        0x30FB000030FC,
-    ),
-}
diff --git a/venv/lib/python3.12/site-packages/idna/intranges.py b/venv/lib/python3.12/site-packages/idna/intranges.py
deleted file mode 100644
index 7bfaa8d8..00000000
--- a/venv/lib/python3.12/site-packages/idna/intranges.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Given a list of integers, made up of (hopefully) a small number of long runs
-of consecutive integers, compute a representation of the form
-((start1, end1), (start2, end2) ...). Then answer the question "was x present
-in the original list?" in time O(log(# runs)).
-"""
-
-import bisect
-from typing import List, Tuple
-
-
-def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
-    """Represent a list of integers as a sequence of ranges:
-    ((start_0, end_0), (start_1, end_1), ...), such that the original
-    integers are exactly those x such that start_i <= x < end_i for some i.
-
-    Ranges are encoded as single integers (start << 32 | end), not as tuples.
-    """
-
-    sorted_list = sorted(list_)
-    ranges = []
-    last_write = -1
-    for i in range(len(sorted_list)):
-        if i + 1 < len(sorted_list):
-            if sorted_list[i] == sorted_list[i + 1] - 1:
-                continue
-        current_range = sorted_list[last_write + 1 : i + 1]
-        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
-        last_write = i
-
-    return tuple(ranges)
-
-
-def _encode_range(start: int, end: int) -> int:
-    return (start << 32) | end
-
-
-def _decode_range(r: int) -> Tuple[int, int]:
-    return (r >> 32), (r & ((1 << 32) - 1))
-
-
-def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
-    """Determine if `int_` falls into one of the ranges in `ranges`."""
-    tuple_ = _encode_range(int_, 0)
-    pos = bisect.bisect_left(ranges, tuple_)
-    # we could be immediately ahead of a tuple (start, end)
-    # with start < int_ <= end
-    if pos > 0:
-        left, right = _decode_range(ranges[pos - 1])
-        if left <= int_ < right:
-            return True
-    # or we could be immediately behind a tuple (int_, end)
-    if pos < len(ranges):
-        left, _ = _decode_range(ranges[pos])
-        if left == int_:
-            return True
-    return False
diff --git a/venv/lib/python3.12/site-packages/idna/package_data.py b/venv/lib/python3.12/site-packages/idna/package_data.py
deleted file mode 100644
index 514ff7e2..00000000
--- a/venv/lib/python3.12/site-packages/idna/package_data.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "3.10"
diff --git a/venv/lib/python3.12/site-packages/idna/py.typed b/venv/lib/python3.12/site-packages/idna/py.typed
deleted file mode 100644
index e69de29b..00000000
diff --git a/venv/lib/python3.12/site-packages/idna/uts46data.py b/venv/lib/python3.12/site-packages/idna/uts46data.py
deleted file mode 100644
index eb894327..00000000
--- a/venv/lib/python3.12/site-packages/idna/uts46data.py
+++ /dev/null
@@ -1,8681 +0,0 @@
-# This file is automatically generated by tools/idna-data
-# vim: set fileencoding=utf-8 :
-
-from typing import List, Tuple, Union
-
-"""IDNA Mapping Table from UTS46."""
-
-
-__version__ = "15.1.0"
-
-
-def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
-    return [
-        (0x0, "3"), - (0x1, "3"), - (0x2, "3"), - (0x3, "3"), - (0x4, "3"), - (0x5, "3"), - (0x6, "3"), - (0x7, "3"), - (0x8, "3"), - (0x9, "3"), - (0xA, "3"), - (0xB, "3"), - (0xC, "3"), - (0xD, "3"), - (0xE, "3"), - (0xF, "3"), - (0x10, "3"), - (0x11, "3"), - (0x12, "3"), - (0x13, "3"), - (0x14, "3"), - (0x15, "3"), - (0x16, "3"), - (0x17, "3"), - (0x18, "3"), - (0x19, "3"), - (0x1A, "3"), - (0x1B, "3"), - (0x1C, "3"), - (0x1D, "3"), - (0x1E, "3"), - (0x1F, "3"), - (0x20, "3"), - (0x21, "3"), - (0x22, "3"), - (0x23, "3"), - (0x24, "3"), - (0x25, "3"), - (0x26, "3"), - (0x27, "3"), - (0x28, "3"), - (0x29, "3"), - (0x2A, "3"), - (0x2B, "3"), - (0x2C, "3"), - (0x2D, "V"), - (0x2E, "V"), - (0x2F, "3"), - (0x30, "V"), - (0x31, "V"), - (0x32, "V"), - (0x33, "V"), - (0x34, "V"), - (0x35, "V"), - (0x36, "V"), - (0x37, "V"), - (0x38, "V"), - (0x39, "V"), - (0x3A, "3"), - (0x3B, "3"), - (0x3C, "3"), - (0x3D, "3"), - (0x3E, "3"), - (0x3F, "3"), - (0x40, "3"), - (0x41, "M", "a"), - (0x42, "M", "b"), - (0x43, "M", "c"), - (0x44, "M", "d"), - (0x45, "M", "e"), - (0x46, "M", "f"), - (0x47, "M", "g"), - (0x48, "M", "h"), - (0x49, "M", "i"), - (0x4A, "M", "j"), - (0x4B, "M", "k"), - (0x4C, "M", "l"), - (0x4D, "M", "m"), - (0x4E, "M", "n"), - (0x4F, "M", "o"), - (0x50, "M", "p"), - (0x51, "M", "q"), - (0x52, "M", "r"), - (0x53, "M", "s"), - (0x54, "M", "t"), - (0x55, "M", "u"), - (0x56, "M", "v"), - (0x57, "M", "w"), - (0x58, "M", "x"), - (0x59,
"M", "y"), - (0x5A, "M", "z"), - (0x5B, "3"), - (0x5C, "3"), - (0x5D, "3"), - (0x5E, "3"), - (0x5F, "3"), - (0x60, "3"), - (0x61, "V"), - (0x62, "V"), - (0x63, "V"), - ] - - -def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x64, "V"), - (0x65, "V"), - (0x66, "V"), - (0x67, "V"), - (0x68, "V"), - (0x69, "V"), - (0x6A, "V"), - (0x6B, "V"), - (0x6C, "V"), - (0x6D, "V"), - (0x6E, "V"), - (0x6F, "V"), - (0x70, "V"), - (0x71, "V"), - (0x72, "V"), - (0x73, "V"), - (0x74, "V"), - (0x75, "V"), - (0x76, "V"), - (0x77, "V"), - (0x78, "V"), - (0x79, "V"), - (0x7A, "V"), - (0x7B, "3"), - (0x7C, "3"), - (0x7D, "3"), - (0x7E, "3"), - (0x7F, "3"), - (0x80, "X"), - (0x81, "X"), - (0x82, "X"), - (0x83, "X"), - (0x84, "X"), - (0x85, "X"), - (0x86, "X"), - (0x87, "X"), - (0x88, "X"), - (0x89, "X"), - (0x8A, "X"), - (0x8B, "X"), - (0x8C, "X"), - (0x8D, "X"), - (0x8E, "X"), - (0x8F, "X"), - (0x90, "X"), - (0x91, "X"), - (0x92, "X"), - (0x93, "X"), - (0x94, "X"), - (0x95, "X"), - (0x96, "X"), - (0x97, "X"), - (0x98, "X"), - (0x99, "X"), - (0x9A, "X"), - (0x9B, "X"), - (0x9C, "X"), - (0x9D, "X"), - (0x9E, "X"), - (0x9F, "X"), - (0xA0, "3", " "), - (0xA1, "V"), - (0xA2, "V"), - (0xA3, "V"), - (0xA4, "V"), - (0xA5, "V"), - (0xA6, "V"), - (0xA7, "V"), - (0xA8, "3", " ̈"), - (0xA9, "V"), - (0xAA, "M", "a"), - (0xAB, "V"), - (0xAC, "V"), - (0xAD, "I"), - (0xAE, "V"), - (0xAF, "3", " ̄"), - (0xB0, "V"), - (0xB1, "V"), - (0xB2, "M", "2"), - (0xB3, "M", "3"), - (0xB4, "3", " ́"), - (0xB5, "M", "μ"), - (0xB6, "V"), - (0xB7, "V"), - (0xB8, "3", " ̧"), - (0xB9, "M", "1"), - (0xBA, "M", "o"), - (0xBB, "V"), - (0xBC, "M", "1⁄4"), - (0xBD, "M", "1⁄2"), - (0xBE, "M", "3⁄4"), - (0xBF, "V"), - (0xC0, "M", "à"), - (0xC1, "M", "á"), - (0xC2, "M", "â"), - (0xC3, "M", "ã"), - (0xC4, "M", "ä"), - (0xC5, "M", "å"), - (0xC6, "M", "æ"), - (0xC7, "M", "ç"), - ] - - -def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC8, "M", "è"), - (0xC9, "M", "é"), - (0xCA, "M", "ê"), - (0xCB, "M", "ë"), - (0xCC, "M", "ì"), - (0xCD, "M", "í"), - (0xCE, "M", "î"), - (0xCF, "M", "ï"), - (0xD0, "M", "ð"), - (0xD1, "M", "ñ"), - (0xD2, "M", "ò"), - (0xD3, "M", "ó"), - (0xD4, "M", "ô"), - (0xD5, "M", "õ"), - (0xD6, "M", "ö"), - (0xD7, "V"), - (0xD8, "M", "ø"), - (0xD9, "M", "ù"), - (0xDA, "M", "ú"), - (0xDB, "M", "û"), - (0xDC, "M", "ü"), - (0xDD, "M", "ý"), - (0xDE, "M", "þ"), - (0xDF, "D", "ss"), - (0xE0, "V"), - (0xE1, "V"), - (0xE2, "V"), - (0xE3, "V"), - (0xE4, "V"), - (0xE5, "V"), - (0xE6, "V"), - (0xE7, "V"), - (0xE8, "V"), - (0xE9, "V"), - (0xEA, "V"), - (0xEB, "V"), - (0xEC, "V"), - (0xED, "V"), - (0xEE, "V"), - (0xEF, "V"), - (0xF0, "V"), - (0xF1, "V"), - (0xF2, "V"), - (0xF3, "V"), - (0xF4, "V"), - (0xF5, "V"), - (0xF6, "V"), - (0xF7, "V"), - (0xF8, "V"), - (0xF9, "V"), - (0xFA, "V"), - (0xFB, "V"), - (0xFC, "V"), - (0xFD, "V"), - (0xFE, "V"), - (0xFF, "V"), - (0x100, "M", "ā"), - (0x101, "V"), - (0x102, "M", "ă"), - (0x103, "V"), - (0x104, "M", "ą"), - (0x105, "V"), - (0x106, "M", "ć"), - (0x107, "V"), - (0x108, "M", "ĉ"), - (0x109, "V"), - (0x10A, "M", "ċ"), - (0x10B, "V"), - (0x10C, "M", "č"), - (0x10D, "V"), - (0x10E, "M", "ď"), - (0x10F, "V"), - (0x110, "M", "đ"), - (0x111, "V"), - (0x112, "M", "ē"), - (0x113, "V"), - (0x114, "M", "ĕ"), - (0x115, "V"), - (0x116, "M", "ė"), - (0x117, "V"), - (0x118, "M", "ę"), - (0x119, "V"), - (0x11A, "M", "ě"), - (0x11B, "V"), - (0x11C, "M", "ĝ"), - (0x11D, "V"), - (0x11E, "M", "ğ"), - (0x11F, "V"), - (0x120, "M", "ġ"), - (0x121, "V"), - 
(0x122, "M", "ģ"), - (0x123, "V"), - (0x124, "M", "ĥ"), - (0x125, "V"), - (0x126, "M", "ħ"), - (0x127, "V"), - (0x128, "M", "ĩ"), - (0x129, "V"), - (0x12A, "M", "ī"), - (0x12B, "V"), - ] - - -def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x12C, "M", "ĭ"), - (0x12D, "V"), - (0x12E, "M", "į"), - (0x12F, "V"), - (0x130, "M", "i̇"), - (0x131, "V"), - (0x132, "M", "ij"), - (0x134, "M", "ĵ"), - (0x135, "V"), - (0x136, "M", "ķ"), - (0x137, "V"), - (0x139, "M", "ĺ"), - (0x13A, "V"), - (0x13B, "M", "ļ"), - (0x13C, "V"), - (0x13D, "M", "ľ"), - (0x13E, "V"), - (0x13F, "M", "l·"), - (0x141, "M", "ł"), - (0x142, "V"), - (0x143, "M", "ń"), - (0x144, "V"), - (0x145, "M", "ņ"), - (0x146, "V"), - (0x147, "M", "ň"), - (0x148, "V"), - (0x149, "M", "ʼn"), - (0x14A, "M", "ŋ"), - (0x14B, "V"), - (0x14C, "M", "ō"), - (0x14D, "V"), - (0x14E, "M", "ŏ"), - (0x14F, "V"), - (0x150, "M", "ő"), - (0x151, "V"), - (0x152, "M", "œ"), - (0x153, "V"), - (0x154, "M", "ŕ"), - (0x155, "V"), - (0x156, "M", "ŗ"), - (0x157, "V"), - (0x158, "M", "ř"), - (0x159, "V"), - (0x15A, "M", "ś"), - (0x15B, "V"), - (0x15C, "M", "ŝ"), - (0x15D, "V"), - (0x15E, "M", "ş"), - (0x15F, "V"), - (0x160, "M", "š"), - (0x161, "V"), - (0x162, "M", "ţ"), - (0x163, "V"), - (0x164, "M", "ť"), - (0x165, "V"), - (0x166, "M", "ŧ"), - (0x167, "V"), - (0x168, "M", "ũ"), - (0x169, "V"), - (0x16A, "M", "ū"), - (0x16B, "V"), - (0x16C, "M", "ŭ"), - (0x16D, "V"), - (0x16E, "M", "ů"), - (0x16F, "V"), - (0x170, "M", "ű"), - (0x171, "V"), - (0x172, "M", "ų"), - (0x173, "V"), - (0x174, "M", "ŵ"), - (0x175, "V"), - (0x176, "M", "ŷ"), - (0x177, "V"), - (0x178, "M", "ÿ"), - (0x179, "M", "ź"), - (0x17A, "V"), - (0x17B, "M", "ż"), - (0x17C, "V"), - (0x17D, "M", "ž"), - (0x17E, "V"), - (0x17F, "M", "s"), - (0x180, "V"), - (0x181, "M", "ɓ"), - (0x182, "M", "ƃ"), - (0x183, "V"), - (0x184, "M", "ƅ"), - (0x185, "V"), - (0x186, "M", "ɔ"), - (0x187, "M", "ƈ"), - (0x188, "V"), - (0x189, "M", "ɖ"), - (0x18A, "M", "ɗ"), - (0x18B, "M", "ƌ"), - (0x18C, "V"), - (0x18E, "M", "ǝ"), - (0x18F, "M", "ə"), - (0x190, "M", "ɛ"), - (0x191, "M", "ƒ"), - (0x192, "V"), - (0x193, "M", "ɠ"), - ] - - -def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x194, "M", "ɣ"), - (0x195, "V"), - (0x196, "M", "ɩ"), - (0x197, "M", "ɨ"), - (0x198, "M", "ƙ"), - (0x199, "V"), - (0x19C, "M", "ɯ"), - (0x19D, "M", "ɲ"), - (0x19E, "V"), - (0x19F, "M", "ɵ"), - (0x1A0, "M", "ơ"), - (0x1A1, "V"), - (0x1A2, "M", "ƣ"), - (0x1A3, "V"), - (0x1A4, "M", "ƥ"), - (0x1A5, "V"), - (0x1A6, "M", "ʀ"), - (0x1A7, "M", "ƨ"), - (0x1A8, "V"), - (0x1A9, "M", "ʃ"), - (0x1AA, "V"), - (0x1AC, "M", "ƭ"), - (0x1AD, "V"), - (0x1AE, "M", "ʈ"), - (0x1AF, "M", "ư"), - (0x1B0, "V"), - (0x1B1, "M", "ʊ"), - (0x1B2, "M", "ʋ"), - (0x1B3, "M", "ƴ"), - (0x1B4, "V"), - (0x1B5, "M", "ƶ"), - (0x1B6, "V"), - (0x1B7, "M", "ʒ"), - (0x1B8, "M", "ƹ"), - (0x1B9, "V"), - (0x1BC, "M", "ƽ"), - (0x1BD, "V"), - (0x1C4, "M", "dž"), - (0x1C7, "M", "lj"), - (0x1CA, "M", "nj"), - (0x1CD, "M", "ǎ"), - (0x1CE, "V"), - (0x1CF, "M", "ǐ"), - (0x1D0, "V"), - (0x1D1, "M", "ǒ"), - (0x1D2, "V"), - (0x1D3, "M", "ǔ"), - (0x1D4, "V"), - (0x1D5, "M", "ǖ"), - (0x1D6, "V"), - (0x1D7, "M", "ǘ"), - (0x1D8, "V"), - (0x1D9, "M", "ǚ"), - (0x1DA, "V"), - (0x1DB, "M", "ǜ"), - (0x1DC, "V"), - (0x1DE, "M", "ǟ"), - (0x1DF, "V"), - (0x1E0, "M", "ǡ"), - (0x1E1, "V"), - (0x1E2, "M", "ǣ"), - (0x1E3, "V"), - (0x1E4, "M", "ǥ"), - (0x1E5, "V"), - (0x1E6, "M", "ǧ"), - (0x1E7, "V"), - (0x1E8, "M", "ǩ"), - (0x1E9, "V"), - (0x1EA, "M", "ǫ"), 
- (0x1EB, "V"), - (0x1EC, "M", "ǭ"), - (0x1ED, "V"), - (0x1EE, "M", "ǯ"), - (0x1EF, "V"), - (0x1F1, "M", "dz"), - (0x1F4, "M", "ǵ"), - (0x1F5, "V"), - (0x1F6, "M", "ƕ"), - (0x1F7, "M", "ƿ"), - (0x1F8, "M", "ǹ"), - (0x1F9, "V"), - (0x1FA, "M", "ǻ"), - (0x1FB, "V"), - (0x1FC, "M", "ǽ"), - (0x1FD, "V"), - (0x1FE, "M", "ǿ"), - (0x1FF, "V"), - (0x200, "M", "ȁ"), - (0x201, "V"), - (0x202, "M", "ȃ"), - (0x203, "V"), - (0x204, "M", "ȅ"), - (0x205, "V"), - (0x206, "M", "ȇ"), - (0x207, "V"), - (0x208, "M", "ȉ"), - (0x209, "V"), - (0x20A, "M", "ȋ"), - (0x20B, "V"), - (0x20C, "M", "ȍ"), - ] - - -def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x20D, "V"), - (0x20E, "M", "ȏ"), - (0x20F, "V"), - (0x210, "M", "ȑ"), - (0x211, "V"), - (0x212, "M", "ȓ"), - (0x213, "V"), - (0x214, "M", "ȕ"), - (0x215, "V"), - (0x216, "M", "ȗ"), - (0x217, "V"), - (0x218, "M", "ș"), - (0x219, "V"), - (0x21A, "M", "ț"), - (0x21B, "V"), - (0x21C, "M", "ȝ"), - (0x21D, "V"), - (0x21E, "M", "ȟ"), - (0x21F, "V"), - (0x220, "M", "ƞ"), - (0x221, "V"), - (0x222, "M", "ȣ"), - (0x223, "V"), - (0x224, "M", "ȥ"), - (0x225, "V"), - (0x226, "M", "ȧ"), - (0x227, "V"), - (0x228, "M", "ȩ"), - (0x229, "V"), - (0x22A, "M", "ȫ"), - (0x22B, "V"), - (0x22C, "M", "ȭ"), - (0x22D, "V"), - (0x22E, "M", "ȯ"), - (0x22F, "V"), - (0x230, "M", "ȱ"), - (0x231, "V"), - (0x232, "M", "ȳ"), - (0x233, "V"), - (0x23A, "M", "ⱥ"), - (0x23B, "M", "ȼ"), - (0x23C, "V"), - (0x23D, "M", "ƚ"), - (0x23E, "M", "ⱦ"), - (0x23F, "V"), - (0x241, "M", "ɂ"), - (0x242, "V"), - (0x243, "M", "ƀ"), - (0x244, "M", "ʉ"), - (0x245, "M", "ʌ"), - (0x246, "M", "ɇ"), - (0x247, "V"), - (0x248, "M", "ɉ"), - (0x249, "V"), - (0x24A, "M", "ɋ"), - (0x24B, "V"), - (0x24C, "M", "ɍ"), - (0x24D, "V"), - (0x24E, "M", "ɏ"), - (0x24F, "V"), - (0x2B0, "M", "h"), - (0x2B1, "M", "ɦ"), - (0x2B2, "M", "j"), - (0x2B3, "M", "r"), - (0x2B4, "M", "ɹ"), - (0x2B5, "M", "ɻ"), - (0x2B6, "M", "ʁ"), - (0x2B7, "M", "w"), - (0x2B8, "M", "y"), - (0x2B9, "V"), - (0x2D8, "3", " ̆"), - (0x2D9, "3", " ̇"), - (0x2DA, "3", " ̊"), - (0x2DB, "3", " ̨"), - (0x2DC, "3", " ̃"), - (0x2DD, "3", " ̋"), - (0x2DE, "V"), - (0x2E0, "M", "ɣ"), - (0x2E1, "M", "l"), - (0x2E2, "M", "s"), - (0x2E3, "M", "x"), - (0x2E4, "M", "ʕ"), - (0x2E5, "V"), - (0x340, "M", "̀"), - (0x341, "M", "́"), - (0x342, "V"), - (0x343, "M", "̓"), - (0x344, "M", "̈́"), - (0x345, "M", "ι"), - (0x346, "V"), - (0x34F, "I"), - (0x350, "V"), - (0x370, "M", "ͱ"), - (0x371, "V"), - (0x372, "M", "ͳ"), - (0x373, "V"), - (0x374, "M", "ʹ"), - (0x375, "V"), - (0x376, "M", "ͷ"), - (0x377, "V"), - ] - - -def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x378, "X"), - (0x37A, "3", " ι"), - (0x37B, "V"), - (0x37E, "3", ";"), - (0x37F, "M", "ϳ"), - (0x380, "X"), - (0x384, "3", " ́"), - (0x385, "3", " ̈́"), - (0x386, "M", "ά"), - (0x387, "M", "·"), - (0x388, "M", "έ"), - (0x389, "M", "ή"), - (0x38A, "M", "ί"), - (0x38B, "X"), - (0x38C, "M", "ό"), - (0x38D, "X"), - (0x38E, "M", "ύ"), - (0x38F, "M", "ώ"), - (0x390, "V"), - (0x391, "M", "α"), - (0x392, "M", "β"), - (0x393, "M", "γ"), - (0x394, "M", "δ"), - (0x395, "M", "ε"), - (0x396, "M", "ζ"), - (0x397, "M", "η"), - (0x398, "M", "θ"), - (0x399, "M", "ι"), - (0x39A, "M", "κ"), - (0x39B, "M", "λ"), - (0x39C, "M", "μ"), - (0x39D, "M", "ν"), - (0x39E, "M", "ξ"), - (0x39F, "M", "ο"), - (0x3A0, "M", "π"), - (0x3A1, "M", "ρ"), - (0x3A2, "X"), - (0x3A3, "M", "σ"), - (0x3A4, "M", "τ"), - (0x3A5, "M", "υ"), - (0x3A6, "M", "φ"), - (0x3A7, "M", "χ"), - (0x3A8, "M", "ψ"), - (0x3A9, 
"M", "ω"), - (0x3AA, "M", "ϊ"), - (0x3AB, "M", "ϋ"), - (0x3AC, "V"), - (0x3C2, "D", "σ"), - (0x3C3, "V"), - (0x3CF, "M", "ϗ"), - (0x3D0, "M", "β"), - (0x3D1, "M", "θ"), - (0x3D2, "M", "υ"), - (0x3D3, "M", "ύ"), - (0x3D4, "M", "ϋ"), - (0x3D5, "M", "φ"), - (0x3D6, "M", "π"), - (0x3D7, "V"), - (0x3D8, "M", "ϙ"), - (0x3D9, "V"), - (0x3DA, "M", "ϛ"), - (0x3DB, "V"), - (0x3DC, "M", "ϝ"), - (0x3DD, "V"), - (0x3DE, "M", "ϟ"), - (0x3DF, "V"), - (0x3E0, "M", "ϡ"), - (0x3E1, "V"), - (0x3E2, "M", "ϣ"), - (0x3E3, "V"), - (0x3E4, "M", "ϥ"), - (0x3E5, "V"), - (0x3E6, "M", "ϧ"), - (0x3E7, "V"), - (0x3E8, "M", "ϩ"), - (0x3E9, "V"), - (0x3EA, "M", "ϫ"), - (0x3EB, "V"), - (0x3EC, "M", "ϭ"), - (0x3ED, "V"), - (0x3EE, "M", "ϯ"), - (0x3EF, "V"), - (0x3F0, "M", "κ"), - (0x3F1, "M", "ρ"), - (0x3F2, "M", "σ"), - (0x3F3, "V"), - (0x3F4, "M", "θ"), - (0x3F5, "M", "ε"), - (0x3F6, "V"), - (0x3F7, "M", "ϸ"), - (0x3F8, "V"), - (0x3F9, "M", "σ"), - (0x3FA, "M", "ϻ"), - (0x3FB, "V"), - (0x3FD, "M", "ͻ"), - (0x3FE, "M", "ͼ"), - (0x3FF, "M", "ͽ"), - (0x400, "M", "ѐ"), - (0x401, "M", "ё"), - (0x402, "M", "ђ"), - ] - - -def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x403, "M", "ѓ"), - (0x404, "M", "є"), - (0x405, "M", "ѕ"), - (0x406, "M", "і"), - (0x407, "M", "ї"), - (0x408, "M", "ј"), - (0x409, "M", "љ"), - (0x40A, "M", "њ"), - (0x40B, "M", "ћ"), - (0x40C, "M", "ќ"), - (0x40D, "M", "ѝ"), - (0x40E, "M", "ў"), - (0x40F, "M", "џ"), - (0x410, "M", "а"), - (0x411, "M", "б"), - (0x412, "M", "в"), - (0x413, "M", "г"), - (0x414, "M", "д"), - (0x415, "M", "е"), - (0x416, "M", "ж"), - (0x417, "M", "з"), - (0x418, "M", "и"), - (0x419, "M", "й"), - (0x41A, "M", "к"), - (0x41B, "M", "л"), - (0x41C, "M", "м"), - (0x41D, "M", "н"), - (0x41E, "M", "о"), - (0x41F, "M", "п"), - (0x420, "M", "р"), - (0x421, "M", "с"), - (0x422, "M", "т"), - (0x423, "M", "у"), - (0x424, "M", "ф"), - (0x425, "M", "х"), - (0x426, "M", "ц"), - (0x427, "M", "ч"), - (0x428, "M", "ш"), - (0x429, "M", "щ"), - (0x42A, "M", "ъ"), - (0x42B, "M", "ы"), - (0x42C, "M", "ь"), - (0x42D, "M", "э"), - (0x42E, "M", "ю"), - (0x42F, "M", "я"), - (0x430, "V"), - (0x460, "M", "ѡ"), - (0x461, "V"), - (0x462, "M", "ѣ"), - (0x463, "V"), - (0x464, "M", "ѥ"), - (0x465, "V"), - (0x466, "M", "ѧ"), - (0x467, "V"), - (0x468, "M", "ѩ"), - (0x469, "V"), - (0x46A, "M", "ѫ"), - (0x46B, "V"), - (0x46C, "M", "ѭ"), - (0x46D, "V"), - (0x46E, "M", "ѯ"), - (0x46F, "V"), - (0x470, "M", "ѱ"), - (0x471, "V"), - (0x472, "M", "ѳ"), - (0x473, "V"), - (0x474, "M", "ѵ"), - (0x475, "V"), - (0x476, "M", "ѷ"), - (0x477, "V"), - (0x478, "M", "ѹ"), - (0x479, "V"), - (0x47A, "M", "ѻ"), - (0x47B, "V"), - (0x47C, "M", "ѽ"), - (0x47D, "V"), - (0x47E, "M", "ѿ"), - (0x47F, "V"), - (0x480, "M", "ҁ"), - (0x481, "V"), - (0x48A, "M", "ҋ"), - (0x48B, "V"), - (0x48C, "M", "ҍ"), - (0x48D, "V"), - (0x48E, "M", "ҏ"), - (0x48F, "V"), - (0x490, "M", "ґ"), - (0x491, "V"), - (0x492, "M", "ғ"), - (0x493, "V"), - (0x494, "M", "ҕ"), - (0x495, "V"), - (0x496, "M", "җ"), - (0x497, "V"), - (0x498, "M", "ҙ"), - (0x499, "V"), - (0x49A, "M", "қ"), - (0x49B, "V"), - (0x49C, "M", "ҝ"), - (0x49D, "V"), - ] - - -def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x49E, "M", "ҟ"), - (0x49F, "V"), - (0x4A0, "M", "ҡ"), - (0x4A1, "V"), - (0x4A2, "M", "ң"), - (0x4A3, "V"), - (0x4A4, "M", "ҥ"), - (0x4A5, "V"), - (0x4A6, "M", "ҧ"), - (0x4A7, "V"), - (0x4A8, "M", "ҩ"), - (0x4A9, "V"), - (0x4AA, "M", "ҫ"), - (0x4AB, "V"), - (0x4AC, "M", "ҭ"), - (0x4AD, "V"), - (0x4AE, "M", "ү"), - (0x4AF, 
"V"), - (0x4B0, "M", "ұ"), - (0x4B1, "V"), - (0x4B2, "M", "ҳ"), - (0x4B3, "V"), - (0x4B4, "M", "ҵ"), - (0x4B5, "V"), - (0x4B6, "M", "ҷ"), - (0x4B7, "V"), - (0x4B8, "M", "ҹ"), - (0x4B9, "V"), - (0x4BA, "M", "һ"), - (0x4BB, "V"), - (0x4BC, "M", "ҽ"), - (0x4BD, "V"), - (0x4BE, "M", "ҿ"), - (0x4BF, "V"), - (0x4C0, "X"), - (0x4C1, "M", "ӂ"), - (0x4C2, "V"), - (0x4C3, "M", "ӄ"), - (0x4C4, "V"), - (0x4C5, "M", "ӆ"), - (0x4C6, "V"), - (0x4C7, "M", "ӈ"), - (0x4C8, "V"), - (0x4C9, "M", "ӊ"), - (0x4CA, "V"), - (0x4CB, "M", "ӌ"), - (0x4CC, "V"), - (0x4CD, "M", "ӎ"), - (0x4CE, "V"), - (0x4D0, "M", "ӑ"), - (0x4D1, "V"), - (0x4D2, "M", "ӓ"), - (0x4D3, "V"), - (0x4D4, "M", "ӕ"), - (0x4D5, "V"), - (0x4D6, "M", "ӗ"), - (0x4D7, "V"), - (0x4D8, "M", "ә"), - (0x4D9, "V"), - (0x4DA, "M", "ӛ"), - (0x4DB, "V"), - (0x4DC, "M", "ӝ"), - (0x4DD, "V"), - (0x4DE, "M", "ӟ"), - (0x4DF, "V"), - (0x4E0, "M", "ӡ"), - (0x4E1, "V"), - (0x4E2, "M", "ӣ"), - (0x4E3, "V"), - (0x4E4, "M", "ӥ"), - (0x4E5, "V"), - (0x4E6, "M", "ӧ"), - (0x4E7, "V"), - (0x4E8, "M", "ө"), - (0x4E9, "V"), - (0x4EA, "M", "ӫ"), - (0x4EB, "V"), - (0x4EC, "M", "ӭ"), - (0x4ED, "V"), - (0x4EE, "M", "ӯ"), - (0x4EF, "V"), - (0x4F0, "M", "ӱ"), - (0x4F1, "V"), - (0x4F2, "M", "ӳ"), - (0x4F3, "V"), - (0x4F4, "M", "ӵ"), - (0x4F5, "V"), - (0x4F6, "M", "ӷ"), - (0x4F7, "V"), - (0x4F8, "M", "ӹ"), - (0x4F9, "V"), - (0x4FA, "M", "ӻ"), - (0x4FB, "V"), - (0x4FC, "M", "ӽ"), - (0x4FD, "V"), - (0x4FE, "M", "ӿ"), - (0x4FF, "V"), - (0x500, "M", "ԁ"), - (0x501, "V"), - (0x502, "M", "ԃ"), - ] - - -def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x503, "V"), - (0x504, "M", "ԅ"), - (0x505, "V"), - (0x506, "M", "ԇ"), - (0x507, "V"), - (0x508, "M", "ԉ"), - (0x509, "V"), - (0x50A, "M", "ԋ"), - (0x50B, "V"), - (0x50C, "M", "ԍ"), - (0x50D, "V"), - (0x50E, "M", "ԏ"), - (0x50F, "V"), - (0x510, "M", "ԑ"), - (0x511, "V"), - (0x512, "M", "ԓ"), - (0x513, "V"), - (0x514, "M", "ԕ"), - (0x515, "V"), - (0x516, "M", "ԗ"), - (0x517, "V"), - (0x518, "M", "ԙ"), - (0x519, "V"), - (0x51A, "M", "ԛ"), - (0x51B, "V"), - (0x51C, "M", "ԝ"), - (0x51D, "V"), - (0x51E, "M", "ԟ"), - (0x51F, "V"), - (0x520, "M", "ԡ"), - (0x521, "V"), - (0x522, "M", "ԣ"), - (0x523, "V"), - (0x524, "M", "ԥ"), - (0x525, "V"), - (0x526, "M", "ԧ"), - (0x527, "V"), - (0x528, "M", "ԩ"), - (0x529, "V"), - (0x52A, "M", "ԫ"), - (0x52B, "V"), - (0x52C, "M", "ԭ"), - (0x52D, "V"), - (0x52E, "M", "ԯ"), - (0x52F, "V"), - (0x530, "X"), - (0x531, "M", "ա"), - (0x532, "M", "բ"), - (0x533, "M", "գ"), - (0x534, "M", "դ"), - (0x535, "M", "ե"), - (0x536, "M", "զ"), - (0x537, "M", "է"), - (0x538, "M", "ը"), - (0x539, "M", "թ"), - (0x53A, "M", "ժ"), - (0x53B, "M", "ի"), - (0x53C, "M", "լ"), - (0x53D, "M", "խ"), - (0x53E, "M", "ծ"), - (0x53F, "M", "կ"), - (0x540, "M", "հ"), - (0x541, "M", "ձ"), - (0x542, "M", "ղ"), - (0x543, "M", "ճ"), - (0x544, "M", "մ"), - (0x545, "M", "յ"), - (0x546, "M", "ն"), - (0x547, "M", "շ"), - (0x548, "M", "ո"), - (0x549, "M", "չ"), - (0x54A, "M", "պ"), - (0x54B, "M", "ջ"), - (0x54C, "M", "ռ"), - (0x54D, "M", "ս"), - (0x54E, "M", "վ"), - (0x54F, "M", "տ"), - (0x550, "M", "ր"), - (0x551, "M", "ց"), - (0x552, "M", "ւ"), - (0x553, "M", "փ"), - (0x554, "M", "ք"), - (0x555, "M", "օ"), - (0x556, "M", "ֆ"), - (0x557, "X"), - (0x559, "V"), - (0x587, "M", "եւ"), - (0x588, "V"), - (0x58B, "X"), - (0x58D, "V"), - (0x590, "X"), - (0x591, "V"), - (0x5C8, "X"), - (0x5D0, "V"), - (0x5EB, "X"), - (0x5EF, "V"), - (0x5F5, "X"), - (0x606, "V"), - (0x61C, "X"), - (0x61D, "V"), - ] - - -def _seg_10() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x675, "M", "اٴ"), - (0x676, "M", "وٴ"), - (0x677, "M", "ۇٴ"), - (0x678, "M", "يٴ"), - (0x679, "V"), - (0x6DD, "X"), - (0x6DE, "V"), - (0x70E, "X"), - (0x710, "V"), - (0x74B, "X"), - (0x74D, "V"), - (0x7B2, "X"), - (0x7C0, "V"), - (0x7FB, "X"), - (0x7FD, "V"), - (0x82E, "X"), - (0x830, "V"), - (0x83F, "X"), - (0x840, "V"), - (0x85C, "X"), - (0x85E, "V"), - (0x85F, "X"), - (0x860, "V"), - (0x86B, "X"), - (0x870, "V"), - (0x88F, "X"), - (0x898, "V"), - (0x8E2, "X"), - (0x8E3, "V"), - (0x958, "M", "क़"), - (0x959, "M", "ख़"), - (0x95A, "M", "ग़"), - (0x95B, "M", "ज़"), - (0x95C, "M", "ड़"), - (0x95D, "M", "ढ़"), - (0x95E, "M", "फ़"), - (0x95F, "M", "य़"), - (0x960, "V"), - (0x984, "X"), - (0x985, "V"), - (0x98D, "X"), - (0x98F, "V"), - (0x991, "X"), - (0x993, "V"), - (0x9A9, "X"), - (0x9AA, "V"), - (0x9B1, "X"), - (0x9B2, "V"), - (0x9B3, "X"), - (0x9B6, "V"), - (0x9BA, "X"), - (0x9BC, "V"), - (0x9C5, "X"), - (0x9C7, "V"), - (0x9C9, "X"), - (0x9CB, "V"), - (0x9CF, "X"), - (0x9D7, "V"), - (0x9D8, "X"), - (0x9DC, "M", "ড়"), - (0x9DD, "M", "ঢ়"), - (0x9DE, "X"), - (0x9DF, "M", "য়"), - (0x9E0, "V"), - (0x9E4, "X"), - (0x9E6, "V"), - (0x9FF, "X"), - (0xA01, "V"), - (0xA04, "X"), - (0xA05, "V"), - (0xA0B, "X"), - (0xA0F, "V"), - (0xA11, "X"), - (0xA13, "V"), - (0xA29, "X"), - (0xA2A, "V"), - (0xA31, "X"), - (0xA32, "V"), - (0xA33, "M", "ਲ਼"), - (0xA34, "X"), - (0xA35, "V"), - (0xA36, "M", "ਸ਼"), - (0xA37, "X"), - (0xA38, "V"), - (0xA3A, "X"), - (0xA3C, "V"), - (0xA3D, "X"), - (0xA3E, "V"), - (0xA43, "X"), - (0xA47, "V"), - (0xA49, "X"), - (0xA4B, "V"), - (0xA4E, "X"), - (0xA51, "V"), - (0xA52, "X"), - (0xA59, "M", "ਖ਼"), - (0xA5A, "M", "ਗ਼"), - (0xA5B, "M", "ਜ਼"), - (0xA5C, "V"), - (0xA5D, "X"), - ] - - -def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA5E, "M", "ਫ਼"), - (0xA5F, "X"), - (0xA66, "V"), - (0xA77, "X"), - (0xA81, "V"), - (0xA84, "X"), - (0xA85, "V"), - (0xA8E, "X"), - (0xA8F, "V"), - (0xA92, "X"), - (0xA93, "V"), - (0xAA9, "X"), - (0xAAA, "V"), - (0xAB1, "X"), - (0xAB2, "V"), - (0xAB4, "X"), - (0xAB5, "V"), - (0xABA, "X"), - (0xABC, "V"), - (0xAC6, "X"), - (0xAC7, "V"), - (0xACA, "X"), - (0xACB, "V"), - (0xACE, "X"), - (0xAD0, "V"), - (0xAD1, "X"), - (0xAE0, "V"), - (0xAE4, "X"), - (0xAE6, "V"), - (0xAF2, "X"), - (0xAF9, "V"), - (0xB00, "X"), - (0xB01, "V"), - (0xB04, "X"), - (0xB05, "V"), - (0xB0D, "X"), - (0xB0F, "V"), - (0xB11, "X"), - (0xB13, "V"), - (0xB29, "X"), - (0xB2A, "V"), - (0xB31, "X"), - (0xB32, "V"), - (0xB34, "X"), - (0xB35, "V"), - (0xB3A, "X"), - (0xB3C, "V"), - (0xB45, "X"), - (0xB47, "V"), - (0xB49, "X"), - (0xB4B, "V"), - (0xB4E, "X"), - (0xB55, "V"), - (0xB58, "X"), - (0xB5C, "M", "ଡ଼"), - (0xB5D, "M", "ଢ଼"), - (0xB5E, "X"), - (0xB5F, "V"), - (0xB64, "X"), - (0xB66, "V"), - (0xB78, "X"), - (0xB82, "V"), - (0xB84, "X"), - (0xB85, "V"), - (0xB8B, "X"), - (0xB8E, "V"), - (0xB91, "X"), - (0xB92, "V"), - (0xB96, "X"), - (0xB99, "V"), - (0xB9B, "X"), - (0xB9C, "V"), - (0xB9D, "X"), - (0xB9E, "V"), - (0xBA0, "X"), - (0xBA3, "V"), - (0xBA5, "X"), - (0xBA8, "V"), - (0xBAB, "X"), - (0xBAE, "V"), - (0xBBA, "X"), - (0xBBE, "V"), - (0xBC3, "X"), - (0xBC6, "V"), - (0xBC9, "X"), - (0xBCA, "V"), - (0xBCE, "X"), - (0xBD0, "V"), - (0xBD1, "X"), - (0xBD7, "V"), - (0xBD8, "X"), - (0xBE6, "V"), - (0xBFB, "X"), - (0xC00, "V"), - (0xC0D, "X"), - (0xC0E, "V"), - (0xC11, "X"), - (0xC12, "V"), - (0xC29, "X"), - (0xC2A, "V"), - ] - - -def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, 
str, str]]]: - return [ - (0xC3A, "X"), - (0xC3C, "V"), - (0xC45, "X"), - (0xC46, "V"), - (0xC49, "X"), - (0xC4A, "V"), - (0xC4E, "X"), - (0xC55, "V"), - (0xC57, "X"), - (0xC58, "V"), - (0xC5B, "X"), - (0xC5D, "V"), - (0xC5E, "X"), - (0xC60, "V"), - (0xC64, "X"), - (0xC66, "V"), - (0xC70, "X"), - (0xC77, "V"), - (0xC8D, "X"), - (0xC8E, "V"), - (0xC91, "X"), - (0xC92, "V"), - (0xCA9, "X"), - (0xCAA, "V"), - (0xCB4, "X"), - (0xCB5, "V"), - (0xCBA, "X"), - (0xCBC, "V"), - (0xCC5, "X"), - (0xCC6, "V"), - (0xCC9, "X"), - (0xCCA, "V"), - (0xCCE, "X"), - (0xCD5, "V"), - (0xCD7, "X"), - (0xCDD, "V"), - (0xCDF, "X"), - (0xCE0, "V"), - (0xCE4, "X"), - (0xCE6, "V"), - (0xCF0, "X"), - (0xCF1, "V"), - (0xCF4, "X"), - (0xD00, "V"), - (0xD0D, "X"), - (0xD0E, "V"), - (0xD11, "X"), - (0xD12, "V"), - (0xD45, "X"), - (0xD46, "V"), - (0xD49, "X"), - (0xD4A, "V"), - (0xD50, "X"), - (0xD54, "V"), - (0xD64, "X"), - (0xD66, "V"), - (0xD80, "X"), - (0xD81, "V"), - (0xD84, "X"), - (0xD85, "V"), - (0xD97, "X"), - (0xD9A, "V"), - (0xDB2, "X"), - (0xDB3, "V"), - (0xDBC, "X"), - (0xDBD, "V"), - (0xDBE, "X"), - (0xDC0, "V"), - (0xDC7, "X"), - (0xDCA, "V"), - (0xDCB, "X"), - (0xDCF, "V"), - (0xDD5, "X"), - (0xDD6, "V"), - (0xDD7, "X"), - (0xDD8, "V"), - (0xDE0, "X"), - (0xDE6, "V"), - (0xDF0, "X"), - (0xDF2, "V"), - (0xDF5, "X"), - (0xE01, "V"), - (0xE33, "M", "ํา"), - (0xE34, "V"), - (0xE3B, "X"), - (0xE3F, "V"), - (0xE5C, "X"), - (0xE81, "V"), - (0xE83, "X"), - (0xE84, "V"), - (0xE85, "X"), - (0xE86, "V"), - (0xE8B, "X"), - (0xE8C, "V"), - (0xEA4, "X"), - (0xEA5, "V"), - (0xEA6, "X"), - (0xEA7, "V"), - (0xEB3, "M", "ໍາ"), - (0xEB4, "V"), - ] - - -def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xEBE, "X"), - (0xEC0, "V"), - (0xEC5, "X"), - (0xEC6, "V"), - (0xEC7, "X"), - (0xEC8, "V"), - (0xECF, "X"), - (0xED0, "V"), - (0xEDA, "X"), - (0xEDC, "M", "ຫນ"), - (0xEDD, "M", "ຫມ"), - (0xEDE, "V"), - (0xEE0, "X"), - (0xF00, "V"), - (0xF0C, "M", "་"), - (0xF0D, "V"), - (0xF43, "M", "གྷ"), - (0xF44, "V"), - (0xF48, "X"), - (0xF49, "V"), - (0xF4D, "M", "ཌྷ"), - (0xF4E, "V"), - (0xF52, "M", "དྷ"), - (0xF53, "V"), - (0xF57, "M", "བྷ"), - (0xF58, "V"), - (0xF5C, "M", "ཛྷ"), - (0xF5D, "V"), - (0xF69, "M", "ཀྵ"), - (0xF6A, "V"), - (0xF6D, "X"), - (0xF71, "V"), - (0xF73, "M", "ཱི"), - (0xF74, "V"), - (0xF75, "M", "ཱུ"), - (0xF76, "M", "ྲྀ"), - (0xF77, "M", "ྲཱྀ"), - (0xF78, "M", "ླྀ"), - (0xF79, "M", "ླཱྀ"), - (0xF7A, "V"), - (0xF81, "M", "ཱྀ"), - (0xF82, "V"), - (0xF93, "M", "ྒྷ"), - (0xF94, "V"), - (0xF98, "X"), - (0xF99, "V"), - (0xF9D, "M", "ྜྷ"), - (0xF9E, "V"), - (0xFA2, "M", "ྡྷ"), - (0xFA3, "V"), - (0xFA7, "M", "ྦྷ"), - (0xFA8, "V"), - (0xFAC, "M", "ྫྷ"), - (0xFAD, "V"), - (0xFB9, "M", "ྐྵ"), - (0xFBA, "V"), - (0xFBD, "X"), - (0xFBE, "V"), - (0xFCD, "X"), - (0xFCE, "V"), - (0xFDB, "X"), - (0x1000, "V"), - (0x10A0, "X"), - (0x10C7, "M", "ⴧ"), - (0x10C8, "X"), - (0x10CD, "M", "ⴭ"), - (0x10CE, "X"), - (0x10D0, "V"), - (0x10FC, "M", "ნ"), - (0x10FD, "V"), - (0x115F, "X"), - (0x1161, "V"), - (0x1249, "X"), - (0x124A, "V"), - (0x124E, "X"), - (0x1250, "V"), - (0x1257, "X"), - (0x1258, "V"), - (0x1259, "X"), - (0x125A, "V"), - (0x125E, "X"), - (0x1260, "V"), - (0x1289, "X"), - (0x128A, "V"), - (0x128E, "X"), - (0x1290, "V"), - (0x12B1, "X"), - (0x12B2, "V"), - (0x12B6, "X"), - (0x12B8, "V"), - (0x12BF, "X"), - (0x12C0, "V"), - (0x12C1, "X"), - (0x12C2, "V"), - (0x12C6, "X"), - (0x12C8, "V"), - (0x12D7, "X"), - (0x12D8, "V"), - (0x1311, "X"), - (0x1312, "V"), - ] - - -def _seg_14() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1316, "X"), - (0x1318, "V"), - (0x135B, "X"), - (0x135D, "V"), - (0x137D, "X"), - (0x1380, "V"), - (0x139A, "X"), - (0x13A0, "V"), - (0x13F6, "X"), - (0x13F8, "M", "Ᏸ"), - (0x13F9, "M", "Ᏹ"), - (0x13FA, "M", "Ᏺ"), - (0x13FB, "M", "Ᏻ"), - (0x13FC, "M", "Ᏼ"), - (0x13FD, "M", "Ᏽ"), - (0x13FE, "X"), - (0x1400, "V"), - (0x1680, "X"), - (0x1681, "V"), - (0x169D, "X"), - (0x16A0, "V"), - (0x16F9, "X"), - (0x1700, "V"), - (0x1716, "X"), - (0x171F, "V"), - (0x1737, "X"), - (0x1740, "V"), - (0x1754, "X"), - (0x1760, "V"), - (0x176D, "X"), - (0x176E, "V"), - (0x1771, "X"), - (0x1772, "V"), - (0x1774, "X"), - (0x1780, "V"), - (0x17B4, "X"), - (0x17B6, "V"), - (0x17DE, "X"), - (0x17E0, "V"), - (0x17EA, "X"), - (0x17F0, "V"), - (0x17FA, "X"), - (0x1800, "V"), - (0x1806, "X"), - (0x1807, "V"), - (0x180B, "I"), - (0x180E, "X"), - (0x180F, "I"), - (0x1810, "V"), - (0x181A, "X"), - (0x1820, "V"), - (0x1879, "X"), - (0x1880, "V"), - (0x18AB, "X"), - (0x18B0, "V"), - (0x18F6, "X"), - (0x1900, "V"), - (0x191F, "X"), - (0x1920, "V"), - (0x192C, "X"), - (0x1930, "V"), - (0x193C, "X"), - (0x1940, "V"), - (0x1941, "X"), - (0x1944, "V"), - (0x196E, "X"), - (0x1970, "V"), - (0x1975, "X"), - (0x1980, "V"), - (0x19AC, "X"), - (0x19B0, "V"), - (0x19CA, "X"), - (0x19D0, "V"), - (0x19DB, "X"), - (0x19DE, "V"), - (0x1A1C, "X"), - (0x1A1E, "V"), - (0x1A5F, "X"), - (0x1A60, "V"), - (0x1A7D, "X"), - (0x1A7F, "V"), - (0x1A8A, "X"), - (0x1A90, "V"), - (0x1A9A, "X"), - (0x1AA0, "V"), - (0x1AAE, "X"), - (0x1AB0, "V"), - (0x1ACF, "X"), - (0x1B00, "V"), - (0x1B4D, "X"), - (0x1B50, "V"), - (0x1B7F, "X"), - (0x1B80, "V"), - (0x1BF4, "X"), - (0x1BFC, "V"), - (0x1C38, "X"), - (0x1C3B, "V"), - (0x1C4A, "X"), - (0x1C4D, "V"), - (0x1C80, "M", "в"), - ] - - -def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1C81, "M", "д"), - (0x1C82, "M", "о"), - (0x1C83, "M", "с"), - (0x1C84, "M", "т"), - (0x1C86, "M", "ъ"), - (0x1C87, "M", "ѣ"), - (0x1C88, "M", "ꙋ"), - (0x1C89, "X"), - (0x1C90, "M", "ა"), - (0x1C91, "M", "ბ"), - (0x1C92, "M", "გ"), - (0x1C93, "M", "დ"), - (0x1C94, "M", "ე"), - (0x1C95, "M", "ვ"), - (0x1C96, "M", "ზ"), - (0x1C97, "M", "თ"), - (0x1C98, "M", "ი"), - (0x1C99, "M", "კ"), - (0x1C9A, "M", "ლ"), - (0x1C9B, "M", "მ"), - (0x1C9C, "M", "ნ"), - (0x1C9D, "M", "ო"), - (0x1C9E, "M", "პ"), - (0x1C9F, "M", "ჟ"), - (0x1CA0, "M", "რ"), - (0x1CA1, "M", "ს"), - (0x1CA2, "M", "ტ"), - (0x1CA3, "M", "უ"), - (0x1CA4, "M", "ფ"), - (0x1CA5, "M", "ქ"), - (0x1CA6, "M", "ღ"), - (0x1CA7, "M", "ყ"), - (0x1CA8, "M", "შ"), - (0x1CA9, "M", "ჩ"), - (0x1CAA, "M", "ც"), - (0x1CAB, "M", "ძ"), - (0x1CAC, "M", "წ"), - (0x1CAD, "M", "ჭ"), - (0x1CAE, "M", "ხ"), - (0x1CAF, "M", "ჯ"), - (0x1CB0, "M", "ჰ"), - (0x1CB1, "M", "ჱ"), - (0x1CB2, "M", "ჲ"), - (0x1CB3, "M", "ჳ"), - (0x1CB4, "M", "ჴ"), - (0x1CB5, "M", "ჵ"), - (0x1CB6, "M", "ჶ"), - (0x1CB7, "M", "ჷ"), - (0x1CB8, "M", "ჸ"), - (0x1CB9, "M", "ჹ"), - (0x1CBA, "M", "ჺ"), - (0x1CBB, "X"), - (0x1CBD, "M", "ჽ"), - (0x1CBE, "M", "ჾ"), - (0x1CBF, "M", "ჿ"), - (0x1CC0, "V"), - (0x1CC8, "X"), - (0x1CD0, "V"), - (0x1CFB, "X"), - (0x1D00, "V"), - (0x1D2C, "M", "a"), - (0x1D2D, "M", "æ"), - (0x1D2E, "M", "b"), - (0x1D2F, "V"), - (0x1D30, "M", "d"), - (0x1D31, "M", "e"), - (0x1D32, "M", "ǝ"), - (0x1D33, "M", "g"), - (0x1D34, "M", "h"), - (0x1D35, "M", "i"), - (0x1D36, "M", "j"), - (0x1D37, "M", "k"), - (0x1D38, "M", "l"), - (0x1D39, "M", "m"), - (0x1D3A, "M", "n"), - (0x1D3B, "V"), - (0x1D3C, "M", "o"), - (0x1D3D, "M", 
"ȣ"), - (0x1D3E, "M", "p"), - (0x1D3F, "M", "r"), - (0x1D40, "M", "t"), - (0x1D41, "M", "u"), - (0x1D42, "M", "w"), - (0x1D43, "M", "a"), - (0x1D44, "M", "ɐ"), - (0x1D45, "M", "ɑ"), - (0x1D46, "M", "ᴂ"), - (0x1D47, "M", "b"), - (0x1D48, "M", "d"), - (0x1D49, "M", "e"), - (0x1D4A, "M", "ə"), - (0x1D4B, "M", "ɛ"), - (0x1D4C, "M", "ɜ"), - (0x1D4D, "M", "g"), - (0x1D4E, "V"), - (0x1D4F, "M", "k"), - (0x1D50, "M", "m"), - (0x1D51, "M", "ŋ"), - (0x1D52, "M", "o"), - (0x1D53, "M", "ɔ"), - ] - - -def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D54, "M", "ᴖ"), - (0x1D55, "M", "ᴗ"), - (0x1D56, "M", "p"), - (0x1D57, "M", "t"), - (0x1D58, "M", "u"), - (0x1D59, "M", "ᴝ"), - (0x1D5A, "M", "ɯ"), - (0x1D5B, "M", "v"), - (0x1D5C, "M", "ᴥ"), - (0x1D5D, "M", "β"), - (0x1D5E, "M", "γ"), - (0x1D5F, "M", "δ"), - (0x1D60, "M", "φ"), - (0x1D61, "M", "χ"), - (0x1D62, "M", "i"), - (0x1D63, "M", "r"), - (0x1D64, "M", "u"), - (0x1D65, "M", "v"), - (0x1D66, "M", "β"), - (0x1D67, "M", "γ"), - (0x1D68, "M", "ρ"), - (0x1D69, "M", "φ"), - (0x1D6A, "M", "χ"), - (0x1D6B, "V"), - (0x1D78, "M", "н"), - (0x1D79, "V"), - (0x1D9B, "M", "ɒ"), - (0x1D9C, "M", "c"), - (0x1D9D, "M", "ɕ"), - (0x1D9E, "M", "ð"), - (0x1D9F, "M", "ɜ"), - (0x1DA0, "M", "f"), - (0x1DA1, "M", "ɟ"), - (0x1DA2, "M", "ɡ"), - (0x1DA3, "M", "ɥ"), - (0x1DA4, "M", "ɨ"), - (0x1DA5, "M", "ɩ"), - (0x1DA6, "M", "ɪ"), - (0x1DA7, "M", "ᵻ"), - (0x1DA8, "M", "ʝ"), - (0x1DA9, "M", "ɭ"), - (0x1DAA, "M", "ᶅ"), - (0x1DAB, "M", "ʟ"), - (0x1DAC, "M", "ɱ"), - (0x1DAD, "M", "ɰ"), - (0x1DAE, "M", "ɲ"), - (0x1DAF, "M", "ɳ"), - (0x1DB0, "M", "ɴ"), - (0x1DB1, "M", "ɵ"), - (0x1DB2, "M", "ɸ"), - (0x1DB3, "M", "ʂ"), - (0x1DB4, "M", "ʃ"), - (0x1DB5, "M", "ƫ"), - (0x1DB6, "M", "ʉ"), - (0x1DB7, "M", "ʊ"), - (0x1DB8, "M", "ᴜ"), - (0x1DB9, "M", "ʋ"), - (0x1DBA, "M", "ʌ"), - (0x1DBB, "M", "z"), - (0x1DBC, "M", "ʐ"), - (0x1DBD, "M", "ʑ"), - (0x1DBE, "M", "ʒ"), - (0x1DBF, "M", "θ"), - (0x1DC0, "V"), - (0x1E00, "M", "ḁ"), - (0x1E01, "V"), - (0x1E02, "M", "ḃ"), - (0x1E03, "V"), - (0x1E04, "M", "ḅ"), - (0x1E05, "V"), - (0x1E06, "M", "ḇ"), - (0x1E07, "V"), - (0x1E08, "M", "ḉ"), - (0x1E09, "V"), - (0x1E0A, "M", "ḋ"), - (0x1E0B, "V"), - (0x1E0C, "M", "ḍ"), - (0x1E0D, "V"), - (0x1E0E, "M", "ḏ"), - (0x1E0F, "V"), - (0x1E10, "M", "ḑ"), - (0x1E11, "V"), - (0x1E12, "M", "ḓ"), - (0x1E13, "V"), - (0x1E14, "M", "ḕ"), - (0x1E15, "V"), - (0x1E16, "M", "ḗ"), - (0x1E17, "V"), - (0x1E18, "M", "ḙ"), - (0x1E19, "V"), - (0x1E1A, "M", "ḛ"), - (0x1E1B, "V"), - (0x1E1C, "M", "ḝ"), - (0x1E1D, "V"), - (0x1E1E, "M", "ḟ"), - (0x1E1F, "V"), - (0x1E20, "M", "ḡ"), - (0x1E21, "V"), - (0x1E22, "M", "ḣ"), - (0x1E23, "V"), - ] - - -def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E24, "M", "ḥ"), - (0x1E25, "V"), - (0x1E26, "M", "ḧ"), - (0x1E27, "V"), - (0x1E28, "M", "ḩ"), - (0x1E29, "V"), - (0x1E2A, "M", "ḫ"), - (0x1E2B, "V"), - (0x1E2C, "M", "ḭ"), - (0x1E2D, "V"), - (0x1E2E, "M", "ḯ"), - (0x1E2F, "V"), - (0x1E30, "M", "ḱ"), - (0x1E31, "V"), - (0x1E32, "M", "ḳ"), - (0x1E33, "V"), - (0x1E34, "M", "ḵ"), - (0x1E35, "V"), - (0x1E36, "M", "ḷ"), - (0x1E37, "V"), - (0x1E38, "M", "ḹ"), - (0x1E39, "V"), - (0x1E3A, "M", "ḻ"), - (0x1E3B, "V"), - (0x1E3C, "M", "ḽ"), - (0x1E3D, "V"), - (0x1E3E, "M", "ḿ"), - (0x1E3F, "V"), - (0x1E40, "M", "ṁ"), - (0x1E41, "V"), - (0x1E42, "M", "ṃ"), - (0x1E43, "V"), - (0x1E44, "M", "ṅ"), - (0x1E45, "V"), - (0x1E46, "M", "ṇ"), - (0x1E47, "V"), - (0x1E48, "M", "ṉ"), - (0x1E49, "V"), - (0x1E4A, "M", "ṋ"), - (0x1E4B, "V"), - (0x1E4C, "M", 
"ṍ"), - (0x1E4D, "V"), - (0x1E4E, "M", "ṏ"), - (0x1E4F, "V"), - (0x1E50, "M", "ṑ"), - (0x1E51, "V"), - (0x1E52, "M", "ṓ"), - (0x1E53, "V"), - (0x1E54, "M", "ṕ"), - (0x1E55, "V"), - (0x1E56, "M", "ṗ"), - (0x1E57, "V"), - (0x1E58, "M", "ṙ"), - (0x1E59, "V"), - (0x1E5A, "M", "ṛ"), - (0x1E5B, "V"), - (0x1E5C, "M", "ṝ"), - (0x1E5D, "V"), - (0x1E5E, "M", "ṟ"), - (0x1E5F, "V"), - (0x1E60, "M", "ṡ"), - (0x1E61, "V"), - (0x1E62, "M", "ṣ"), - (0x1E63, "V"), - (0x1E64, "M", "ṥ"), - (0x1E65, "V"), - (0x1E66, "M", "ṧ"), - (0x1E67, "V"), - (0x1E68, "M", "ṩ"), - (0x1E69, "V"), - (0x1E6A, "M", "ṫ"), - (0x1E6B, "V"), - (0x1E6C, "M", "ṭ"), - (0x1E6D, "V"), - (0x1E6E, "M", "ṯ"), - (0x1E6F, "V"), - (0x1E70, "M", "ṱ"), - (0x1E71, "V"), - (0x1E72, "M", "ṳ"), - (0x1E73, "V"), - (0x1E74, "M", "ṵ"), - (0x1E75, "V"), - (0x1E76, "M", "ṷ"), - (0x1E77, "V"), - (0x1E78, "M", "ṹ"), - (0x1E79, "V"), - (0x1E7A, "M", "ṻ"), - (0x1E7B, "V"), - (0x1E7C, "M", "ṽ"), - (0x1E7D, "V"), - (0x1E7E, "M", "ṿ"), - (0x1E7F, "V"), - (0x1E80, "M", "ẁ"), - (0x1E81, "V"), - (0x1E82, "M", "ẃ"), - (0x1E83, "V"), - (0x1E84, "M", "ẅ"), - (0x1E85, "V"), - (0x1E86, "M", "ẇ"), - (0x1E87, "V"), - ] - - -def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E88, "M", "ẉ"), - (0x1E89, "V"), - (0x1E8A, "M", "ẋ"), - (0x1E8B, "V"), - (0x1E8C, "M", "ẍ"), - (0x1E8D, "V"), - (0x1E8E, "M", "ẏ"), - (0x1E8F, "V"), - (0x1E90, "M", "ẑ"), - (0x1E91, "V"), - (0x1E92, "M", "ẓ"), - (0x1E93, "V"), - (0x1E94, "M", "ẕ"), - (0x1E95, "V"), - (0x1E9A, "M", "aʾ"), - (0x1E9B, "M", "ṡ"), - (0x1E9C, "V"), - (0x1E9E, "M", "ß"), - (0x1E9F, "V"), - (0x1EA0, "M", "ạ"), - (0x1EA1, "V"), - (0x1EA2, "M", "ả"), - (0x1EA3, "V"), - (0x1EA4, "M", "ấ"), - (0x1EA5, "V"), - (0x1EA6, "M", "ầ"), - (0x1EA7, "V"), - (0x1EA8, "M", "ẩ"), - (0x1EA9, "V"), - (0x1EAA, "M", "ẫ"), - (0x1EAB, "V"), - (0x1EAC, "M", "ậ"), - (0x1EAD, "V"), - (0x1EAE, "M", "ắ"), - (0x1EAF, "V"), - (0x1EB0, "M", "ằ"), - (0x1EB1, "V"), - (0x1EB2, "M", "ẳ"), - (0x1EB3, "V"), - (0x1EB4, "M", "ẵ"), - (0x1EB5, "V"), - (0x1EB6, "M", "ặ"), - (0x1EB7, "V"), - (0x1EB8, "M", "ẹ"), - (0x1EB9, "V"), - (0x1EBA, "M", "ẻ"), - (0x1EBB, "V"), - (0x1EBC, "M", "ẽ"), - (0x1EBD, "V"), - (0x1EBE, "M", "ế"), - (0x1EBF, "V"), - (0x1EC0, "M", "ề"), - (0x1EC1, "V"), - (0x1EC2, "M", "ể"), - (0x1EC3, "V"), - (0x1EC4, "M", "ễ"), - (0x1EC5, "V"), - (0x1EC6, "M", "ệ"), - (0x1EC7, "V"), - (0x1EC8, "M", "ỉ"), - (0x1EC9, "V"), - (0x1ECA, "M", "ị"), - (0x1ECB, "V"), - (0x1ECC, "M", "ọ"), - (0x1ECD, "V"), - (0x1ECE, "M", "ỏ"), - (0x1ECF, "V"), - (0x1ED0, "M", "ố"), - (0x1ED1, "V"), - (0x1ED2, "M", "ồ"), - (0x1ED3, "V"), - (0x1ED4, "M", "ổ"), - (0x1ED5, "V"), - (0x1ED6, "M", "ỗ"), - (0x1ED7, "V"), - (0x1ED8, "M", "ộ"), - (0x1ED9, "V"), - (0x1EDA, "M", "ớ"), - (0x1EDB, "V"), - (0x1EDC, "M", "ờ"), - (0x1EDD, "V"), - (0x1EDE, "M", "ở"), - (0x1EDF, "V"), - (0x1EE0, "M", "ỡ"), - (0x1EE1, "V"), - (0x1EE2, "M", "ợ"), - (0x1EE3, "V"), - (0x1EE4, "M", "ụ"), - (0x1EE5, "V"), - (0x1EE6, "M", "ủ"), - (0x1EE7, "V"), - (0x1EE8, "M", "ứ"), - (0x1EE9, "V"), - (0x1EEA, "M", "ừ"), - (0x1EEB, "V"), - (0x1EEC, "M", "ử"), - (0x1EED, "V"), - (0x1EEE, "M", "ữ"), - (0x1EEF, "V"), - (0x1EF0, "M", "ự"), - ] - - -def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EF1, "V"), - (0x1EF2, "M", "ỳ"), - (0x1EF3, "V"), - (0x1EF4, "M", "ỵ"), - (0x1EF5, "V"), - (0x1EF6, "M", "ỷ"), - (0x1EF7, "V"), - (0x1EF8, "M", "ỹ"), - (0x1EF9, "V"), - (0x1EFA, "M", "ỻ"), - (0x1EFB, "V"), - (0x1EFC, "M", "ỽ"), - (0x1EFD, "V"), - (0x1EFE, "M", 
"ỿ"), - (0x1EFF, "V"), - (0x1F08, "M", "ἀ"), - (0x1F09, "M", "ἁ"), - (0x1F0A, "M", "ἂ"), - (0x1F0B, "M", "ἃ"), - (0x1F0C, "M", "ἄ"), - (0x1F0D, "M", "ἅ"), - (0x1F0E, "M", "ἆ"), - (0x1F0F, "M", "ἇ"), - (0x1F10, "V"), - (0x1F16, "X"), - (0x1F18, "M", "ἐ"), - (0x1F19, "M", "ἑ"), - (0x1F1A, "M", "ἒ"), - (0x1F1B, "M", "ἓ"), - (0x1F1C, "M", "ἔ"), - (0x1F1D, "M", "ἕ"), - (0x1F1E, "X"), - (0x1F20, "V"), - (0x1F28, "M", "ἠ"), - (0x1F29, "M", "ἡ"), - (0x1F2A, "M", "ἢ"), - (0x1F2B, "M", "ἣ"), - (0x1F2C, "M", "ἤ"), - (0x1F2D, "M", "ἥ"), - (0x1F2E, "M", "ἦ"), - (0x1F2F, "M", "ἧ"), - (0x1F30, "V"), - (0x1F38, "M", "ἰ"), - (0x1F39, "M", "ἱ"), - (0x1F3A, "M", "ἲ"), - (0x1F3B, "M", "ἳ"), - (0x1F3C, "M", "ἴ"), - (0x1F3D, "M", "ἵ"), - (0x1F3E, "M", "ἶ"), - (0x1F3F, "M", "ἷ"), - (0x1F40, "V"), - (0x1F46, "X"), - (0x1F48, "M", "ὀ"), - (0x1F49, "M", "ὁ"), - (0x1F4A, "M", "ὂ"), - (0x1F4B, "M", "ὃ"), - (0x1F4C, "M", "ὄ"), - (0x1F4D, "M", "ὅ"), - (0x1F4E, "X"), - (0x1F50, "V"), - (0x1F58, "X"), - (0x1F59, "M", "ὑ"), - (0x1F5A, "X"), - (0x1F5B, "M", "ὓ"), - (0x1F5C, "X"), - (0x1F5D, "M", "ὕ"), - (0x1F5E, "X"), - (0x1F5F, "M", "ὗ"), - (0x1F60, "V"), - (0x1F68, "M", "ὠ"), - (0x1F69, "M", "ὡ"), - (0x1F6A, "M", "ὢ"), - (0x1F6B, "M", "ὣ"), - (0x1F6C, "M", "ὤ"), - (0x1F6D, "M", "ὥ"), - (0x1F6E, "M", "ὦ"), - (0x1F6F, "M", "ὧ"), - (0x1F70, "V"), - (0x1F71, "M", "ά"), - (0x1F72, "V"), - (0x1F73, "M", "έ"), - (0x1F74, "V"), - (0x1F75, "M", "ή"), - (0x1F76, "V"), - (0x1F77, "M", "ί"), - (0x1F78, "V"), - (0x1F79, "M", "ό"), - (0x1F7A, "V"), - (0x1F7B, "M", "ύ"), - (0x1F7C, "V"), - (0x1F7D, "M", "ώ"), - (0x1F7E, "X"), - (0x1F80, "M", "ἀι"), - (0x1F81, "M", "ἁι"), - (0x1F82, "M", "ἂι"), - (0x1F83, "M", "ἃι"), - (0x1F84, "M", "ἄι"), - (0x1F85, "M", "ἅι"), - (0x1F86, "M", "ἆι"), - (0x1F87, "M", "ἇι"), - ] - - -def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F88, "M", "ἀι"), - (0x1F89, "M", "ἁι"), - (0x1F8A, "M", "ἂι"), - (0x1F8B, "M", "ἃι"), - (0x1F8C, "M", "ἄι"), - (0x1F8D, "M", "ἅι"), - (0x1F8E, "M", "ἆι"), - (0x1F8F, "M", "ἇι"), - (0x1F90, "M", "ἠι"), - (0x1F91, "M", "ἡι"), - (0x1F92, "M", "ἢι"), - (0x1F93, "M", "ἣι"), - (0x1F94, "M", "ἤι"), - (0x1F95, "M", "ἥι"), - (0x1F96, "M", "ἦι"), - (0x1F97, "M", "ἧι"), - (0x1F98, "M", "ἠι"), - (0x1F99, "M", "ἡι"), - (0x1F9A, "M", "ἢι"), - (0x1F9B, "M", "ἣι"), - (0x1F9C, "M", "ἤι"), - (0x1F9D, "M", "ἥι"), - (0x1F9E, "M", "ἦι"), - (0x1F9F, "M", "ἧι"), - (0x1FA0, "M", "ὠι"), - (0x1FA1, "M", "ὡι"), - (0x1FA2, "M", "ὢι"), - (0x1FA3, "M", "ὣι"), - (0x1FA4, "M", "ὤι"), - (0x1FA5, "M", "ὥι"), - (0x1FA6, "M", "ὦι"), - (0x1FA7, "M", "ὧι"), - (0x1FA8, "M", "ὠι"), - (0x1FA9, "M", "ὡι"), - (0x1FAA, "M", "ὢι"), - (0x1FAB, "M", "ὣι"), - (0x1FAC, "M", "ὤι"), - (0x1FAD, "M", "ὥι"), - (0x1FAE, "M", "ὦι"), - (0x1FAF, "M", "ὧι"), - (0x1FB0, "V"), - (0x1FB2, "M", "ὰι"), - (0x1FB3, "M", "αι"), - (0x1FB4, "M", "άι"), - (0x1FB5, "X"), - (0x1FB6, "V"), - (0x1FB7, "M", "ᾶι"), - (0x1FB8, "M", "ᾰ"), - (0x1FB9, "M", "ᾱ"), - (0x1FBA, "M", "ὰ"), - (0x1FBB, "M", "ά"), - (0x1FBC, "M", "αι"), - (0x1FBD, "3", " ̓"), - (0x1FBE, "M", "ι"), - (0x1FBF, "3", " ̓"), - (0x1FC0, "3", " ͂"), - (0x1FC1, "3", " ̈͂"), - (0x1FC2, "M", "ὴι"), - (0x1FC3, "M", "ηι"), - (0x1FC4, "M", "ήι"), - (0x1FC5, "X"), - (0x1FC6, "V"), - (0x1FC7, "M", "ῆι"), - (0x1FC8, "M", "ὲ"), - (0x1FC9, "M", "έ"), - (0x1FCA, "M", "ὴ"), - (0x1FCB, "M", "ή"), - (0x1FCC, "M", "ηι"), - (0x1FCD, "3", " ̓̀"), - (0x1FCE, "3", " ̓́"), - (0x1FCF, "3", " ̓͂"), - (0x1FD0, "V"), - (0x1FD3, "M", "ΐ"), - (0x1FD4, "X"), - (0x1FD6, "V"), - 
(0x1FD8, "M", "ῐ"), - (0x1FD9, "M", "ῑ"), - (0x1FDA, "M", "ὶ"), - (0x1FDB, "M", "ί"), - (0x1FDC, "X"), - (0x1FDD, "3", " ̔̀"), - (0x1FDE, "3", " ̔́"), - (0x1FDF, "3", " ̔͂"), - (0x1FE0, "V"), - (0x1FE3, "M", "ΰ"), - (0x1FE4, "V"), - (0x1FE8, "M", "ῠ"), - (0x1FE9, "M", "ῡ"), - (0x1FEA, "M", "ὺ"), - (0x1FEB, "M", "ύ"), - (0x1FEC, "M", "ῥ"), - (0x1FED, "3", " ̈̀"), - (0x1FEE, "3", " ̈́"), - (0x1FEF, "3", "`"), - (0x1FF0, "X"), - (0x1FF2, "M", "ὼι"), - (0x1FF3, "M", "ωι"), - (0x1FF4, "M", "ώι"), - (0x1FF5, "X"), - (0x1FF6, "V"), - ] - - -def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FF7, "M", "ῶι"), - (0x1FF8, "M", "ὸ"), - (0x1FF9, "M", "ό"), - (0x1FFA, "M", "ὼ"), - (0x1FFB, "M", "ώ"), - (0x1FFC, "M", "ωι"), - (0x1FFD, "3", " ́"), - (0x1FFE, "3", " ̔"), - (0x1FFF, "X"), - (0x2000, "3", " "), - (0x200B, "I"), - (0x200C, "D", ""), - (0x200E, "X"), - (0x2010, "V"), - (0x2011, "M", "‐"), - (0x2012, "V"), - (0x2017, "3", " ̳"), - (0x2018, "V"), - (0x2024, "X"), - (0x2027, "V"), - (0x2028, "X"), - (0x202F, "3", " "), - (0x2030, "V"), - (0x2033, "M", "′′"), - (0x2034, "M", "′′′"), - (0x2035, "V"), - (0x2036, "M", "‵‵"), - (0x2037, "M", "‵‵‵"), - (0x2038, "V"), - (0x203C, "3", "!!"), - (0x203D, "V"), - (0x203E, "3", " ̅"), - (0x203F, "V"), - (0x2047, "3", "??"), - (0x2048, "3", "?!"), - (0x2049, "3", "!?"), - (0x204A, "V"), - (0x2057, "M", "′′′′"), - (0x2058, "V"), - (0x205F, "3", " "), - (0x2060, "I"), - (0x2061, "X"), - (0x2064, "I"), - (0x2065, "X"), - (0x2070, "M", "0"), - (0x2071, "M", "i"), - (0x2072, "X"), - (0x2074, "M", "4"), - (0x2075, "M", "5"), - (0x2076, "M", "6"), - (0x2077, "M", "7"), - (0x2078, "M", "8"), - (0x2079, "M", "9"), - (0x207A, "3", "+"), - (0x207B, "M", "−"), - (0x207C, "3", "="), - (0x207D, "3", "("), - (0x207E, "3", ")"), - (0x207F, "M", "n"), - (0x2080, "M", "0"), - (0x2081, "M", "1"), - (0x2082, "M", "2"), - (0x2083, "M", "3"), - (0x2084, "M", "4"), - (0x2085, "M", "5"), - (0x2086, "M", "6"), - (0x2087, "M", "7"), - (0x2088, "M", "8"), - (0x2089, "M", "9"), - (0x208A, "3", "+"), - (0x208B, "M", "−"), - (0x208C, "3", "="), - (0x208D, "3", "("), - (0x208E, "3", ")"), - (0x208F, "X"), - (0x2090, "M", "a"), - (0x2091, "M", "e"), - (0x2092, "M", "o"), - (0x2093, "M", "x"), - (0x2094, "M", "ə"), - (0x2095, "M", "h"), - (0x2096, "M", "k"), - (0x2097, "M", "l"), - (0x2098, "M", "m"), - (0x2099, "M", "n"), - (0x209A, "M", "p"), - (0x209B, "M", "s"), - (0x209C, "M", "t"), - (0x209D, "X"), - (0x20A0, "V"), - (0x20A8, "M", "rs"), - (0x20A9, "V"), - (0x20C1, "X"), - (0x20D0, "V"), - (0x20F1, "X"), - (0x2100, "3", "a/c"), - (0x2101, "3", "a/s"), - (0x2102, "M", "c"), - (0x2103, "M", "°c"), - (0x2104, "V"), - ] - - -def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2105, "3", "c/o"), - (0x2106, "3", "c/u"), - (0x2107, "M", "ɛ"), - (0x2108, "V"), - (0x2109, "M", "°f"), - (0x210A, "M", "g"), - (0x210B, "M", "h"), - (0x210F, "M", "ħ"), - (0x2110, "M", "i"), - (0x2112, "M", "l"), - (0x2114, "V"), - (0x2115, "M", "n"), - (0x2116, "M", "no"), - (0x2117, "V"), - (0x2119, "M", "p"), - (0x211A, "M", "q"), - (0x211B, "M", "r"), - (0x211E, "V"), - (0x2120, "M", "sm"), - (0x2121, "M", "tel"), - (0x2122, "M", "tm"), - (0x2123, "V"), - (0x2124, "M", "z"), - (0x2125, "V"), - (0x2126, "M", "ω"), - (0x2127, "V"), - (0x2128, "M", "z"), - (0x2129, "V"), - (0x212A, "M", "k"), - (0x212B, "M", "å"), - (0x212C, "M", "b"), - (0x212D, "M", "c"), - (0x212E, "V"), - (0x212F, "M", "e"), - (0x2131, "M", "f"), - (0x2132, "X"), - (0x2133, 
"M", "m"), - (0x2134, "M", "o"), - (0x2135, "M", "א"), - (0x2136, "M", "ב"), - (0x2137, "M", "ג"), - (0x2138, "M", "ד"), - (0x2139, "M", "i"), - (0x213A, "V"), - (0x213B, "M", "fax"), - (0x213C, "M", "π"), - (0x213D, "M", "γ"), - (0x213F, "M", "π"), - (0x2140, "M", "∑"), - (0x2141, "V"), - (0x2145, "M", "d"), - (0x2147, "M", "e"), - (0x2148, "M", "i"), - (0x2149, "M", "j"), - (0x214A, "V"), - (0x2150, "M", "1⁄7"), - (0x2151, "M", "1⁄9"), - (0x2152, "M", "1⁄10"), - (0x2153, "M", "1⁄3"), - (0x2154, "M", "2⁄3"), - (0x2155, "M", "1⁄5"), - (0x2156, "M", "2⁄5"), - (0x2157, "M", "3⁄5"), - (0x2158, "M", "4⁄5"), - (0x2159, "M", "1⁄6"), - (0x215A, "M", "5⁄6"), - (0x215B, "M", "1⁄8"), - (0x215C, "M", "3⁄8"), - (0x215D, "M", "5⁄8"), - (0x215E, "M", "7⁄8"), - (0x215F, "M", "1⁄"), - (0x2160, "M", "i"), - (0x2161, "M", "ii"), - (0x2162, "M", "iii"), - (0x2163, "M", "iv"), - (0x2164, "M", "v"), - (0x2165, "M", "vi"), - (0x2166, "M", "vii"), - (0x2167, "M", "viii"), - (0x2168, "M", "ix"), - (0x2169, "M", "x"), - (0x216A, "M", "xi"), - (0x216B, "M", "xii"), - (0x216C, "M", "l"), - (0x216D, "M", "c"), - (0x216E, "M", "d"), - (0x216F, "M", "m"), - (0x2170, "M", "i"), - (0x2171, "M", "ii"), - (0x2172, "M", "iii"), - (0x2173, "M", "iv"), - (0x2174, "M", "v"), - (0x2175, "M", "vi"), - (0x2176, "M", "vii"), - (0x2177, "M", "viii"), - (0x2178, "M", "ix"), - (0x2179, "M", "x"), - (0x217A, "M", "xi"), - (0x217B, "M", "xii"), - (0x217C, "M", "l"), - ] - - -def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x217D, "M", "c"), - (0x217E, "M", "d"), - (0x217F, "M", "m"), - (0x2180, "V"), - (0x2183, "X"), - (0x2184, "V"), - (0x2189, "M", "0⁄3"), - (0x218A, "V"), - (0x218C, "X"), - (0x2190, "V"), - (0x222C, "M", "∫∫"), - (0x222D, "M", "∫∫∫"), - (0x222E, "V"), - (0x222F, "M", "∮∮"), - (0x2230, "M", "∮∮∮"), - (0x2231, "V"), - (0x2329, "M", "〈"), - (0x232A, "M", "〉"), - (0x232B, "V"), - (0x2427, "X"), - (0x2440, "V"), - (0x244B, "X"), - (0x2460, "M", "1"), - (0x2461, "M", "2"), - (0x2462, "M", "3"), - (0x2463, "M", "4"), - (0x2464, "M", "5"), - (0x2465, "M", "6"), - (0x2466, "M", "7"), - (0x2467, "M", "8"), - (0x2468, "M", "9"), - (0x2469, "M", "10"), - (0x246A, "M", "11"), - (0x246B, "M", "12"), - (0x246C, "M", "13"), - (0x246D, "M", "14"), - (0x246E, "M", "15"), - (0x246F, "M", "16"), - (0x2470, "M", "17"), - (0x2471, "M", "18"), - (0x2472, "M", "19"), - (0x2473, "M", "20"), - (0x2474, "3", "(1)"), - (0x2475, "3", "(2)"), - (0x2476, "3", "(3)"), - (0x2477, "3", "(4)"), - (0x2478, "3", "(5)"), - (0x2479, "3", "(6)"), - (0x247A, "3", "(7)"), - (0x247B, "3", "(8)"), - (0x247C, "3", "(9)"), - (0x247D, "3", "(10)"), - (0x247E, "3", "(11)"), - (0x247F, "3", "(12)"), - (0x2480, "3", "(13)"), - (0x2481, "3", "(14)"), - (0x2482, "3", "(15)"), - (0x2483, "3", "(16)"), - (0x2484, "3", "(17)"), - (0x2485, "3", "(18)"), - (0x2486, "3", "(19)"), - (0x2487, "3", "(20)"), - (0x2488, "X"), - (0x249C, "3", "(a)"), - (0x249D, "3", "(b)"), - (0x249E, "3", "(c)"), - (0x249F, "3", "(d)"), - (0x24A0, "3", "(e)"), - (0x24A1, "3", "(f)"), - (0x24A2, "3", "(g)"), - (0x24A3, "3", "(h)"), - (0x24A4, "3", "(i)"), - (0x24A5, "3", "(j)"), - (0x24A6, "3", "(k)"), - (0x24A7, "3", "(l)"), - (0x24A8, "3", "(m)"), - (0x24A9, "3", "(n)"), - (0x24AA, "3", "(o)"), - (0x24AB, "3", "(p)"), - (0x24AC, "3", "(q)"), - (0x24AD, "3", "(r)"), - (0x24AE, "3", "(s)"), - (0x24AF, "3", "(t)"), - (0x24B0, "3", "(u)"), - (0x24B1, "3", "(v)"), - (0x24B2, "3", "(w)"), - (0x24B3, "3", "(x)"), - (0x24B4, "3", "(y)"), - (0x24B5, "3", "(z)"), - 
(0x24B6, "M", "a"), - (0x24B7, "M", "b"), - (0x24B8, "M", "c"), - (0x24B9, "M", "d"), - (0x24BA, "M", "e"), - (0x24BB, "M", "f"), - (0x24BC, "M", "g"), - (0x24BD, "M", "h"), - (0x24BE, "M", "i"), - (0x24BF, "M", "j"), - (0x24C0, "M", "k"), - ] - - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x24C1, "M", "l"), - (0x24C2, "M", "m"), - (0x24C3, "M", "n"), - (0x24C4, "M", "o"), - (0x24C5, "M", "p"), - (0x24C6, "M", "q"), - (0x24C7, "M", "r"), - (0x24C8, "M", "s"), - (0x24C9, "M", "t"), - (0x24CA, "M", "u"), - (0x24CB, "M", "v"), - (0x24CC, "M", "w"), - (0x24CD, "M", "x"), - (0x24CE, "M", "y"), - (0x24CF, "M", "z"), - (0x24D0, "M", "a"), - (0x24D1, "M", "b"), - (0x24D2, "M", "c"), - (0x24D3, "M", "d"), - (0x24D4, "M", "e"), - (0x24D5, "M", "f"), - (0x24D6, "M", "g"), - (0x24D7, "M", "h"), - (0x24D8, "M", "i"), - (0x24D9, "M", "j"), - (0x24DA, "M", "k"), - (0x24DB, "M", "l"), - (0x24DC, "M", "m"), - (0x24DD, "M", "n"), - (0x24DE, "M", "o"), - (0x24DF, "M", "p"), - (0x24E0, "M", "q"), - (0x24E1, "M", "r"), - (0x24E2, "M", "s"), - (0x24E3, "M", "t"), - (0x24E4, "M", "u"), - (0x24E5, "M", "v"), - (0x24E6, "M", "w"), - (0x24E7, "M", "x"), - (0x24E8, "M", "y"), - (0x24E9, "M", "z"), - (0x24EA, "M", "0"), - (0x24EB, "V"), - (0x2A0C, "M", "∫∫∫∫"), - (0x2A0D, "V"), - (0x2A74, "3", "::="), - (0x2A75, "3", "=="), - (0x2A76, "3", "==="), - (0x2A77, "V"), - (0x2ADC, "M", "⫝̸"), - (0x2ADD, "V"), - (0x2B74, "X"), - (0x2B76, "V"), - (0x2B96, "X"), - (0x2B97, "V"), - (0x2C00, "M", "ⰰ"), - (0x2C01, "M", "ⰱ"), - (0x2C02, "M", "ⰲ"), - (0x2C03, "M", "ⰳ"), - (0x2C04, "M", "ⰴ"), - (0x2C05, "M", "ⰵ"), - (0x2C06, "M", "ⰶ"), - (0x2C07, "M", "ⰷ"), - (0x2C08, "M", "ⰸ"), - (0x2C09, "M", "ⰹ"), - (0x2C0A, "M", "ⰺ"), - (0x2C0B, "M", "ⰻ"), - (0x2C0C, "M", "ⰼ"), - (0x2C0D, "M", "ⰽ"), - (0x2C0E, "M", "ⰾ"), - (0x2C0F, "M", "ⰿ"), - (0x2C10, "M", "ⱀ"), - (0x2C11, "M", "ⱁ"), - (0x2C12, "M", "ⱂ"), - (0x2C13, "M", "ⱃ"), - (0x2C14, "M", "ⱄ"), - (0x2C15, "M", "ⱅ"), - (0x2C16, "M", "ⱆ"), - (0x2C17, "M", "ⱇ"), - (0x2C18, "M", "ⱈ"), - (0x2C19, "M", "ⱉ"), - (0x2C1A, "M", "ⱊ"), - (0x2C1B, "M", "ⱋ"), - (0x2C1C, "M", "ⱌ"), - (0x2C1D, "M", "ⱍ"), - (0x2C1E, "M", "ⱎ"), - (0x2C1F, "M", "ⱏ"), - (0x2C20, "M", "ⱐ"), - (0x2C21, "M", "ⱑ"), - (0x2C22, "M", "ⱒ"), - (0x2C23, "M", "ⱓ"), - (0x2C24, "M", "ⱔ"), - (0x2C25, "M", "ⱕ"), - (0x2C26, "M", "ⱖ"), - (0x2C27, "M", "ⱗ"), - (0x2C28, "M", "ⱘ"), - (0x2C29, "M", "ⱙ"), - (0x2C2A, "M", "ⱚ"), - (0x2C2B, "M", "ⱛ"), - (0x2C2C, "M", "ⱜ"), - ] - - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2C2D, "M", "ⱝ"), - (0x2C2E, "M", "ⱞ"), - (0x2C2F, "M", "ⱟ"), - (0x2C30, "V"), - (0x2C60, "M", "ⱡ"), - (0x2C61, "V"), - (0x2C62, "M", "ɫ"), - (0x2C63, "M", "ᵽ"), - (0x2C64, "M", "ɽ"), - (0x2C65, "V"), - (0x2C67, "M", "ⱨ"), - (0x2C68, "V"), - (0x2C69, "M", "ⱪ"), - (0x2C6A, "V"), - (0x2C6B, "M", "ⱬ"), - (0x2C6C, "V"), - (0x2C6D, "M", "ɑ"), - (0x2C6E, "M", "ɱ"), - (0x2C6F, "M", "ɐ"), - (0x2C70, "M", "ɒ"), - (0x2C71, "V"), - (0x2C72, "M", "ⱳ"), - (0x2C73, "V"), - (0x2C75, "M", "ⱶ"), - (0x2C76, "V"), - (0x2C7C, "M", "j"), - (0x2C7D, "M", "v"), - (0x2C7E, "M", "ȿ"), - (0x2C7F, "M", "ɀ"), - (0x2C80, "M", "ⲁ"), - (0x2C81, "V"), - (0x2C82, "M", "ⲃ"), - (0x2C83, "V"), - (0x2C84, "M", "ⲅ"), - (0x2C85, "V"), - (0x2C86, "M", "ⲇ"), - (0x2C87, "V"), - (0x2C88, "M", "ⲉ"), - (0x2C89, "V"), - (0x2C8A, "M", "ⲋ"), - (0x2C8B, "V"), - (0x2C8C, "M", "ⲍ"), - (0x2C8D, "V"), - (0x2C8E, "M", "ⲏ"), - (0x2C8F, "V"), - (0x2C90, "M", "ⲑ"), - (0x2C91, "V"), - (0x2C92, "M", "ⲓ"), - 
(0x2C93, "V"), - (0x2C94, "M", "ⲕ"), - (0x2C95, "V"), - (0x2C96, "M", "ⲗ"), - (0x2C97, "V"), - (0x2C98, "M", "ⲙ"), - (0x2C99, "V"), - (0x2C9A, "M", "ⲛ"), - (0x2C9B, "V"), - (0x2C9C, "M", "ⲝ"), - (0x2C9D, "V"), - (0x2C9E, "M", "ⲟ"), - (0x2C9F, "V"), - (0x2CA0, "M", "ⲡ"), - (0x2CA1, "V"), - (0x2CA2, "M", "ⲣ"), - (0x2CA3, "V"), - (0x2CA4, "M", "ⲥ"), - (0x2CA5, "V"), - (0x2CA6, "M", "ⲧ"), - (0x2CA7, "V"), - (0x2CA8, "M", "ⲩ"), - (0x2CA9, "V"), - (0x2CAA, "M", "ⲫ"), - (0x2CAB, "V"), - (0x2CAC, "M", "ⲭ"), - (0x2CAD, "V"), - (0x2CAE, "M", "ⲯ"), - (0x2CAF, "V"), - (0x2CB0, "M", "ⲱ"), - (0x2CB1, "V"), - (0x2CB2, "M", "ⲳ"), - (0x2CB3, "V"), - (0x2CB4, "M", "ⲵ"), - (0x2CB5, "V"), - (0x2CB6, "M", "ⲷ"), - (0x2CB7, "V"), - (0x2CB8, "M", "ⲹ"), - (0x2CB9, "V"), - (0x2CBA, "M", "ⲻ"), - (0x2CBB, "V"), - (0x2CBC, "M", "ⲽ"), - (0x2CBD, "V"), - (0x2CBE, "M", "ⲿ"), - (0x2CBF, "V"), - (0x2CC0, "M", "ⳁ"), - (0x2CC1, "V"), - (0x2CC2, "M", "ⳃ"), - (0x2CC3, "V"), - (0x2CC4, "M", "ⳅ"), - (0x2CC5, "V"), - (0x2CC6, "M", "ⳇ"), - ] - - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2CC7, "V"), - (0x2CC8, "M", "ⳉ"), - (0x2CC9, "V"), - (0x2CCA, "M", "ⳋ"), - (0x2CCB, "V"), - (0x2CCC, "M", "ⳍ"), - (0x2CCD, "V"), - (0x2CCE, "M", "ⳏ"), - (0x2CCF, "V"), - (0x2CD0, "M", "ⳑ"), - (0x2CD1, "V"), - (0x2CD2, "M", "ⳓ"), - (0x2CD3, "V"), - (0x2CD4, "M", "ⳕ"), - (0x2CD5, "V"), - (0x2CD6, "M", "ⳗ"), - (0x2CD7, "V"), - (0x2CD8, "M", "ⳙ"), - (0x2CD9, "V"), - (0x2CDA, "M", "ⳛ"), - (0x2CDB, "V"), - (0x2CDC, "M", "ⳝ"), - (0x2CDD, "V"), - (0x2CDE, "M", "ⳟ"), - (0x2CDF, "V"), - (0x2CE0, "M", "ⳡ"), - (0x2CE1, "V"), - (0x2CE2, "M", "ⳣ"), - (0x2CE3, "V"), - (0x2CEB, "M", "ⳬ"), - (0x2CEC, "V"), - (0x2CED, "M", "ⳮ"), - (0x2CEE, "V"), - (0x2CF2, "M", "ⳳ"), - (0x2CF3, "V"), - (0x2CF4, "X"), - (0x2CF9, "V"), - (0x2D26, "X"), - (0x2D27, "V"), - (0x2D28, "X"), - (0x2D2D, "V"), - (0x2D2E, "X"), - (0x2D30, "V"), - (0x2D68, "X"), - (0x2D6F, "M", "ⵡ"), - (0x2D70, "V"), - (0x2D71, "X"), - (0x2D7F, "V"), - (0x2D97, "X"), - (0x2DA0, "V"), - (0x2DA7, "X"), - (0x2DA8, "V"), - (0x2DAF, "X"), - (0x2DB0, "V"), - (0x2DB7, "X"), - (0x2DB8, "V"), - (0x2DBF, "X"), - (0x2DC0, "V"), - (0x2DC7, "X"), - (0x2DC8, "V"), - (0x2DCF, "X"), - (0x2DD0, "V"), - (0x2DD7, "X"), - (0x2DD8, "V"), - (0x2DDF, "X"), - (0x2DE0, "V"), - (0x2E5E, "X"), - (0x2E80, "V"), - (0x2E9A, "X"), - (0x2E9B, "V"), - (0x2E9F, "M", "母"), - (0x2EA0, "V"), - (0x2EF3, "M", "龟"), - (0x2EF4, "X"), - (0x2F00, "M", "一"), - (0x2F01, "M", "丨"), - (0x2F02, "M", "丶"), - (0x2F03, "M", "丿"), - (0x2F04, "M", "乙"), - (0x2F05, "M", "亅"), - (0x2F06, "M", "二"), - (0x2F07, "M", "亠"), - (0x2F08, "M", "人"), - (0x2F09, "M", "儿"), - (0x2F0A, "M", "入"), - (0x2F0B, "M", "八"), - (0x2F0C, "M", "冂"), - (0x2F0D, "M", "冖"), - (0x2F0E, "M", "冫"), - (0x2F0F, "M", "几"), - (0x2F10, "M", "凵"), - (0x2F11, "M", "刀"), - (0x2F12, "M", "力"), - (0x2F13, "M", "勹"), - (0x2F14, "M", "匕"), - (0x2F15, "M", "匚"), - (0x2F16, "M", "匸"), - (0x2F17, "M", "十"), - (0x2F18, "M", "卜"), - (0x2F19, "M", "卩"), - ] - - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F1A, "M", "厂"), - (0x2F1B, "M", "厶"), - (0x2F1C, "M", "又"), - (0x2F1D, "M", "口"), - (0x2F1E, "M", "囗"), - (0x2F1F, "M", "土"), - (0x2F20, "M", "士"), - (0x2F21, "M", "夂"), - (0x2F22, "M", "夊"), - (0x2F23, "M", "夕"), - (0x2F24, "M", "大"), - (0x2F25, "M", "女"), - (0x2F26, "M", "子"), - (0x2F27, "M", "宀"), - (0x2F28, "M", "寸"), - (0x2F29, "M", "小"), - (0x2F2A, "M", "尢"), - (0x2F2B, "M", "尸"), - (0x2F2C, "M", "屮"), - (0x2F2D, "M", 
"山"), - (0x2F2E, "M", "巛"), - (0x2F2F, "M", "工"), - (0x2F30, "M", "己"), - (0x2F31, "M", "巾"), - (0x2F32, "M", "干"), - (0x2F33, "M", "幺"), - (0x2F34, "M", "广"), - (0x2F35, "M", "廴"), - (0x2F36, "M", "廾"), - (0x2F37, "M", "弋"), - (0x2F38, "M", "弓"), - (0x2F39, "M", "彐"), - (0x2F3A, "M", "彡"), - (0x2F3B, "M", "彳"), - (0x2F3C, "M", "心"), - (0x2F3D, "M", "戈"), - (0x2F3E, "M", "戶"), - (0x2F3F, "M", "手"), - (0x2F40, "M", "支"), - (0x2F41, "M", "攴"), - (0x2F42, "M", "文"), - (0x2F43, "M", "斗"), - (0x2F44, "M", "斤"), - (0x2F45, "M", "方"), - (0x2F46, "M", "无"), - (0x2F47, "M", "日"), - (0x2F48, "M", "曰"), - (0x2F49, "M", "月"), - (0x2F4A, "M", "木"), - (0x2F4B, "M", "欠"), - (0x2F4C, "M", "止"), - (0x2F4D, "M", "歹"), - (0x2F4E, "M", "殳"), - (0x2F4F, "M", "毋"), - (0x2F50, "M", "比"), - (0x2F51, "M", "毛"), - (0x2F52, "M", "氏"), - (0x2F53, "M", "气"), - (0x2F54, "M", "水"), - (0x2F55, "M", "火"), - (0x2F56, "M", "爪"), - (0x2F57, "M", "父"), - (0x2F58, "M", "爻"), - (0x2F59, "M", "爿"), - (0x2F5A, "M", "片"), - (0x2F5B, "M", "牙"), - (0x2F5C, "M", "牛"), - (0x2F5D, "M", "犬"), - (0x2F5E, "M", "玄"), - (0x2F5F, "M", "玉"), - (0x2F60, "M", "瓜"), - (0x2F61, "M", "瓦"), - (0x2F62, "M", "甘"), - (0x2F63, "M", "生"), - (0x2F64, "M", "用"), - (0x2F65, "M", "田"), - (0x2F66, "M", "疋"), - (0x2F67, "M", "疒"), - (0x2F68, "M", "癶"), - (0x2F69, "M", "白"), - (0x2F6A, "M", "皮"), - (0x2F6B, "M", "皿"), - (0x2F6C, "M", "目"), - (0x2F6D, "M", "矛"), - (0x2F6E, "M", "矢"), - (0x2F6F, "M", "石"), - (0x2F70, "M", "示"), - (0x2F71, "M", "禸"), - (0x2F72, "M", "禾"), - (0x2F73, "M", "穴"), - (0x2F74, "M", "立"), - (0x2F75, "M", "竹"), - (0x2F76, "M", "米"), - (0x2F77, "M", "糸"), - (0x2F78, "M", "缶"), - (0x2F79, "M", "网"), - (0x2F7A, "M", "羊"), - (0x2F7B, "M", "羽"), - (0x2F7C, "M", "老"), - (0x2F7D, "M", "而"), - ] - - -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F7E, "M", "耒"), - (0x2F7F, "M", "耳"), - (0x2F80, "M", "聿"), - (0x2F81, "M", "肉"), - (0x2F82, "M", "臣"), - (0x2F83, "M", "自"), - (0x2F84, "M", "至"), - (0x2F85, "M", "臼"), - (0x2F86, "M", "舌"), - (0x2F87, "M", "舛"), - (0x2F88, "M", "舟"), - (0x2F89, "M", "艮"), - (0x2F8A, "M", "色"), - (0x2F8B, "M", "艸"), - (0x2F8C, "M", "虍"), - (0x2F8D, "M", "虫"), - (0x2F8E, "M", "血"), - (0x2F8F, "M", "行"), - (0x2F90, "M", "衣"), - (0x2F91, "M", "襾"), - (0x2F92, "M", "見"), - (0x2F93, "M", "角"), - (0x2F94, "M", "言"), - (0x2F95, "M", "谷"), - (0x2F96, "M", "豆"), - (0x2F97, "M", "豕"), - (0x2F98, "M", "豸"), - (0x2F99, "M", "貝"), - (0x2F9A, "M", "赤"), - (0x2F9B, "M", "走"), - (0x2F9C, "M", "足"), - (0x2F9D, "M", "身"), - (0x2F9E, "M", "車"), - (0x2F9F, "M", "辛"), - (0x2FA0, "M", "辰"), - (0x2FA1, "M", "辵"), - (0x2FA2, "M", "邑"), - (0x2FA3, "M", "酉"), - (0x2FA4, "M", "釆"), - (0x2FA5, "M", "里"), - (0x2FA6, "M", "金"), - (0x2FA7, "M", "長"), - (0x2FA8, "M", "門"), - (0x2FA9, "M", "阜"), - (0x2FAA, "M", "隶"), - (0x2FAB, "M", "隹"), - (0x2FAC, "M", "雨"), - (0x2FAD, "M", "靑"), - (0x2FAE, "M", "非"), - (0x2FAF, "M", "面"), - (0x2FB0, "M", "革"), - (0x2FB1, "M", "韋"), - (0x2FB2, "M", "韭"), - (0x2FB3, "M", "音"), - (0x2FB4, "M", "頁"), - (0x2FB5, "M", "風"), - (0x2FB6, "M", "飛"), - (0x2FB7, "M", "食"), - (0x2FB8, "M", "首"), - (0x2FB9, "M", "香"), - (0x2FBA, "M", "馬"), - (0x2FBB, "M", "骨"), - (0x2FBC, "M", "高"), - (0x2FBD, "M", "髟"), - (0x2FBE, "M", "鬥"), - (0x2FBF, "M", "鬯"), - (0x2FC0, "M", "鬲"), - (0x2FC1, "M", "鬼"), - (0x2FC2, "M", "魚"), - (0x2FC3, "M", "鳥"), - (0x2FC4, "M", "鹵"), - (0x2FC5, "M", "鹿"), - (0x2FC6, "M", "麥"), - (0x2FC7, "M", "麻"), - (0x2FC8, "M", "黃"), - (0x2FC9, "M", "黍"), - (0x2FCA, "M", "黑"), - 
(0x2FCB, "M", "黹"), - (0x2FCC, "M", "黽"), - (0x2FCD, "M", "鼎"), - (0x2FCE, "M", "鼓"), - (0x2FCF, "M", "鼠"), - (0x2FD0, "M", "鼻"), - (0x2FD1, "M", "齊"), - (0x2FD2, "M", "齒"), - (0x2FD3, "M", "龍"), - (0x2FD4, "M", "龜"), - (0x2FD5, "M", "龠"), - (0x2FD6, "X"), - (0x3000, "3", " "), - (0x3001, "V"), - (0x3002, "M", "."), - (0x3003, "V"), - (0x3036, "M", "〒"), - (0x3037, "V"), - (0x3038, "M", "十"), - (0x3039, "M", "卄"), - (0x303A, "M", "卅"), - (0x303B, "V"), - (0x3040, "X"), - ] - - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3041, "V"), - (0x3097, "X"), - (0x3099, "V"), - (0x309B, "3", " ゙"), - (0x309C, "3", " ゚"), - (0x309D, "V"), - (0x309F, "M", "より"), - (0x30A0, "V"), - (0x30FF, "M", "コト"), - (0x3100, "X"), - (0x3105, "V"), - (0x3130, "X"), - (0x3131, "M", "ᄀ"), - (0x3132, "M", "ᄁ"), - (0x3133, "M", "ᆪ"), - (0x3134, "M", "ᄂ"), - (0x3135, "M", "ᆬ"), - (0x3136, "M", "ᆭ"), - (0x3137, "M", "ᄃ"), - (0x3138, "M", "ᄄ"), - (0x3139, "M", "ᄅ"), - (0x313A, "M", "ᆰ"), - (0x313B, "M", "ᆱ"), - (0x313C, "M", "ᆲ"), - (0x313D, "M", "ᆳ"), - (0x313E, "M", "ᆴ"), - (0x313F, "M", "ᆵ"), - (0x3140, "M", "ᄚ"), - (0x3141, "M", "ᄆ"), - (0x3142, "M", "ᄇ"), - (0x3143, "M", "ᄈ"), - (0x3144, "M", "ᄡ"), - (0x3145, "M", "ᄉ"), - (0x3146, "M", "ᄊ"), - (0x3147, "M", "ᄋ"), - (0x3148, "M", "ᄌ"), - (0x3149, "M", "ᄍ"), - (0x314A, "M", "ᄎ"), - (0x314B, "M", "ᄏ"), - (0x314C, "M", "ᄐ"), - (0x314D, "M", "ᄑ"), - (0x314E, "M", "ᄒ"), - (0x314F, "M", "ᅡ"), - (0x3150, "M", "ᅢ"), - (0x3151, "M", "ᅣ"), - (0x3152, "M", "ᅤ"), - (0x3153, "M", "ᅥ"), - (0x3154, "M", "ᅦ"), - (0x3155, "M", "ᅧ"), - (0x3156, "M", "ᅨ"), - (0x3157, "M", "ᅩ"), - (0x3158, "M", "ᅪ"), - (0x3159, "M", "ᅫ"), - (0x315A, "M", "ᅬ"), - (0x315B, "M", "ᅭ"), - (0x315C, "M", "ᅮ"), - (0x315D, "M", "ᅯ"), - (0x315E, "M", "ᅰ"), - (0x315F, "M", "ᅱ"), - (0x3160, "M", "ᅲ"), - (0x3161, "M", "ᅳ"), - (0x3162, "M", "ᅴ"), - (0x3163, "M", "ᅵ"), - (0x3164, "X"), - (0x3165, "M", "ᄔ"), - (0x3166, "M", "ᄕ"), - (0x3167, "M", "ᇇ"), - (0x3168, "M", "ᇈ"), - (0x3169, "M", "ᇌ"), - (0x316A, "M", "ᇎ"), - (0x316B, "M", "ᇓ"), - (0x316C, "M", "ᇗ"), - (0x316D, "M", "ᇙ"), - (0x316E, "M", "ᄜ"), - (0x316F, "M", "ᇝ"), - (0x3170, "M", "ᇟ"), - (0x3171, "M", "ᄝ"), - (0x3172, "M", "ᄞ"), - (0x3173, "M", "ᄠ"), - (0x3174, "M", "ᄢ"), - (0x3175, "M", "ᄣ"), - (0x3176, "M", "ᄧ"), - (0x3177, "M", "ᄩ"), - (0x3178, "M", "ᄫ"), - (0x3179, "M", "ᄬ"), - (0x317A, "M", "ᄭ"), - (0x317B, "M", "ᄮ"), - (0x317C, "M", "ᄯ"), - (0x317D, "M", "ᄲ"), - (0x317E, "M", "ᄶ"), - (0x317F, "M", "ᅀ"), - (0x3180, "M", "ᅇ"), - (0x3181, "M", "ᅌ"), - (0x3182, "M", "ᇱ"), - (0x3183, "M", "ᇲ"), - (0x3184, "M", "ᅗ"), - (0x3185, "M", "ᅘ"), - (0x3186, "M", "ᅙ"), - (0x3187, "M", "ᆄ"), - (0x3188, "M", "ᆅ"), - ] - - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3189, "M", "ᆈ"), - (0x318A, "M", "ᆑ"), - (0x318B, "M", "ᆒ"), - (0x318C, "M", "ᆔ"), - (0x318D, "M", "ᆞ"), - (0x318E, "M", "ᆡ"), - (0x318F, "X"), - (0x3190, "V"), - (0x3192, "M", "一"), - (0x3193, "M", "二"), - (0x3194, "M", "三"), - (0x3195, "M", "四"), - (0x3196, "M", "上"), - (0x3197, "M", "中"), - (0x3198, "M", "下"), - (0x3199, "M", "甲"), - (0x319A, "M", "乙"), - (0x319B, "M", "丙"), - (0x319C, "M", "丁"), - (0x319D, "M", "天"), - (0x319E, "M", "地"), - (0x319F, "M", "人"), - (0x31A0, "V"), - (0x31E4, "X"), - (0x31F0, "V"), - (0x3200, "3", "(ᄀ)"), - (0x3201, "3", "(ᄂ)"), - (0x3202, "3", "(ᄃ)"), - (0x3203, "3", "(ᄅ)"), - (0x3204, "3", "(ᄆ)"), - (0x3205, "3", "(ᄇ)"), - (0x3206, "3", "(ᄉ)"), - (0x3207, "3", "(ᄋ)"), - (0x3208, "3", "(ᄌ)"), - 
(0x3209, "3", "(ᄎ)"), - (0x320A, "3", "(ᄏ)"), - (0x320B, "3", "(ᄐ)"), - (0x320C, "3", "(ᄑ)"), - (0x320D, "3", "(ᄒ)"), - (0x320E, "3", "(가)"), - (0x320F, "3", "(나)"), - (0x3210, "3", "(다)"), - (0x3211, "3", "(라)"), - (0x3212, "3", "(마)"), - (0x3213, "3", "(바)"), - (0x3214, "3", "(사)"), - (0x3215, "3", "(아)"), - (0x3216, "3", "(자)"), - (0x3217, "3", "(차)"), - (0x3218, "3", "(카)"), - (0x3219, "3", "(타)"), - (0x321A, "3", "(파)"), - (0x321B, "3", "(하)"), - (0x321C, "3", "(주)"), - (0x321D, "3", "(오전)"), - (0x321E, "3", "(오후)"), - (0x321F, "X"), - (0x3220, "3", "(一)"), - (0x3221, "3", "(二)"), - (0x3222, "3", "(三)"), - (0x3223, "3", "(四)"), - (0x3224, "3", "(五)"), - (0x3225, "3", "(六)"), - (0x3226, "3", "(七)"), - (0x3227, "3", "(八)"), - (0x3228, "3", "(九)"), - (0x3229, "3", "(十)"), - (0x322A, "3", "(月)"), - (0x322B, "3", "(火)"), - (0x322C, "3", "(水)"), - (0x322D, "3", "(木)"), - (0x322E, "3", "(金)"), - (0x322F, "3", "(土)"), - (0x3230, "3", "(日)"), - (0x3231, "3", "(株)"), - (0x3232, "3", "(有)"), - (0x3233, "3", "(社)"), - (0x3234, "3", "(名)"), - (0x3235, "3", "(特)"), - (0x3236, "3", "(財)"), - (0x3237, "3", "(祝)"), - (0x3238, "3", "(労)"), - (0x3239, "3", "(代)"), - (0x323A, "3", "(呼)"), - (0x323B, "3", "(学)"), - (0x323C, "3", "(監)"), - (0x323D, "3", "(企)"), - (0x323E, "3", "(資)"), - (0x323F, "3", "(協)"), - (0x3240, "3", "(祭)"), - (0x3241, "3", "(休)"), - (0x3242, "3", "(自)"), - (0x3243, "3", "(至)"), - (0x3244, "M", "問"), - (0x3245, "M", "幼"), - (0x3246, "M", "文"), - (0x3247, "M", "箏"), - (0x3248, "V"), - (0x3250, "M", "pte"), - (0x3251, "M", "21"), - ] - - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3252, "M", "22"), - (0x3253, "M", "23"), - (0x3254, "M", "24"), - (0x3255, "M", "25"), - (0x3256, "M", "26"), - (0x3257, "M", "27"), - (0x3258, "M", "28"), - (0x3259, "M", "29"), - (0x325A, "M", "30"), - (0x325B, "M", "31"), - (0x325C, "M", "32"), - (0x325D, "M", "33"), - (0x325E, "M", "34"), - (0x325F, "M", "35"), - (0x3260, "M", "ᄀ"), - (0x3261, "M", "ᄂ"), - (0x3262, "M", "ᄃ"), - (0x3263, "M", "ᄅ"), - (0x3264, "M", "ᄆ"), - (0x3265, "M", "ᄇ"), - (0x3266, "M", "ᄉ"), - (0x3267, "M", "ᄋ"), - (0x3268, "M", "ᄌ"), - (0x3269, "M", "ᄎ"), - (0x326A, "M", "ᄏ"), - (0x326B, "M", "ᄐ"), - (0x326C, "M", "ᄑ"), - (0x326D, "M", "ᄒ"), - (0x326E, "M", "가"), - (0x326F, "M", "나"), - (0x3270, "M", "다"), - (0x3271, "M", "라"), - (0x3272, "M", "마"), - (0x3273, "M", "바"), - (0x3274, "M", "사"), - (0x3275, "M", "아"), - (0x3276, "M", "자"), - (0x3277, "M", "차"), - (0x3278, "M", "카"), - (0x3279, "M", "타"), - (0x327A, "M", "파"), - (0x327B, "M", "하"), - (0x327C, "M", "참고"), - (0x327D, "M", "주의"), - (0x327E, "M", "우"), - (0x327F, "V"), - (0x3280, "M", "一"), - (0x3281, "M", "二"), - (0x3282, "M", "三"), - (0x3283, "M", "四"), - (0x3284, "M", "五"), - (0x3285, "M", "六"), - (0x3286, "M", "七"), - (0x3287, "M", "八"), - (0x3288, "M", "九"), - (0x3289, "M", "十"), - (0x328A, "M", "月"), - (0x328B, "M", "火"), - (0x328C, "M", "水"), - (0x328D, "M", "木"), - (0x328E, "M", "金"), - (0x328F, "M", "土"), - (0x3290, "M", "日"), - (0x3291, "M", "株"), - (0x3292, "M", "有"), - (0x3293, "M", "社"), - (0x3294, "M", "名"), - (0x3295, "M", "特"), - (0x3296, "M", "財"), - (0x3297, "M", "祝"), - (0x3298, "M", "労"), - (0x3299, "M", "秘"), - (0x329A, "M", "男"), - (0x329B, "M", "女"), - (0x329C, "M", "適"), - (0x329D, "M", "優"), - (0x329E, "M", "印"), - (0x329F, "M", "注"), - (0x32A0, "M", "項"), - (0x32A1, "M", "休"), - (0x32A2, "M", "写"), - (0x32A3, "M", "正"), - (0x32A4, "M", "上"), - (0x32A5, "M", "中"), - (0x32A6, "M", "下"), - (0x32A7, "M", "左"), - 
(0x32A8, "M", "右"), - (0x32A9, "M", "医"), - (0x32AA, "M", "宗"), - (0x32AB, "M", "学"), - (0x32AC, "M", "監"), - (0x32AD, "M", "企"), - (0x32AE, "M", "資"), - (0x32AF, "M", "協"), - (0x32B0, "M", "夜"), - (0x32B1, "M", "36"), - (0x32B2, "M", "37"), - (0x32B3, "M", "38"), - (0x32B4, "M", "39"), - (0x32B5, "M", "40"), - ] - - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x32B6, "M", "41"), - (0x32B7, "M", "42"), - (0x32B8, "M", "43"), - (0x32B9, "M", "44"), - (0x32BA, "M", "45"), - (0x32BB, "M", "46"), - (0x32BC, "M", "47"), - (0x32BD, "M", "48"), - (0x32BE, "M", "49"), - (0x32BF, "M", "50"), - (0x32C0, "M", "1月"), - (0x32C1, "M", "2月"), - (0x32C2, "M", "3月"), - (0x32C3, "M", "4月"), - (0x32C4, "M", "5月"), - (0x32C5, "M", "6月"), - (0x32C6, "M", "7月"), - (0x32C7, "M", "8月"), - (0x32C8, "M", "9月"), - (0x32C9, "M", "10月"), - (0x32CA, "M", "11月"), - (0x32CB, "M", "12月"), - (0x32CC, "M", "hg"), - (0x32CD, "M", "erg"), - (0x32CE, "M", "ev"), - (0x32CF, "M", "ltd"), - (0x32D0, "M", "ア"), - (0x32D1, "M", "イ"), - (0x32D2, "M", "ウ"), - (0x32D3, "M", "エ"), - (0x32D4, "M", "オ"), - (0x32D5, "M", "カ"), - (0x32D6, "M", "キ"), - (0x32D7, "M", "ク"), - (0x32D8, "M", "ケ"), - (0x32D9, "M", "コ"), - (0x32DA, "M", "サ"), - (0x32DB, "M", "シ"), - (0x32DC, "M", "ス"), - (0x32DD, "M", "セ"), - (0x32DE, "M", "ソ"), - (0x32DF, "M", "タ"), - (0x32E0, "M", "チ"), - (0x32E1, "M", "ツ"), - (0x32E2, "M", "テ"), - (0x32E3, "M", "ト"), - (0x32E4, "M", "ナ"), - (0x32E5, "M", "ニ"), - (0x32E6, "M", "ヌ"), - (0x32E7, "M", "ネ"), - (0x32E8, "M", "ノ"), - (0x32E9, "M", "ハ"), - (0x32EA, "M", "ヒ"), - (0x32EB, "M", "フ"), - (0x32EC, "M", "ヘ"), - (0x32ED, "M", "ホ"), - (0x32EE, "M", "マ"), - (0x32EF, "M", "ミ"), - (0x32F0, "M", "ム"), - (0x32F1, "M", "メ"), - (0x32F2, "M", "モ"), - (0x32F3, "M", "ヤ"), - (0x32F4, "M", "ユ"), - (0x32F5, "M", "ヨ"), - (0x32F6, "M", "ラ"), - (0x32F7, "M", "リ"), - (0x32F8, "M", "ル"), - (0x32F9, "M", "レ"), - (0x32FA, "M", "ロ"), - (0x32FB, "M", "ワ"), - (0x32FC, "M", "ヰ"), - (0x32FD, "M", "ヱ"), - (0x32FE, "M", "ヲ"), - (0x32FF, "M", "令和"), - (0x3300, "M", "アパート"), - (0x3301, "M", "アルファ"), - (0x3302, "M", "アンペア"), - (0x3303, "M", "アール"), - (0x3304, "M", "イニング"), - (0x3305, "M", "インチ"), - (0x3306, "M", "ウォン"), - (0x3307, "M", "エスクード"), - (0x3308, "M", "エーカー"), - (0x3309, "M", "オンス"), - (0x330A, "M", "オーム"), - (0x330B, "M", "カイリ"), - (0x330C, "M", "カラット"), - (0x330D, "M", "カロリー"), - (0x330E, "M", "ガロン"), - (0x330F, "M", "ガンマ"), - (0x3310, "M", "ギガ"), - (0x3311, "M", "ギニー"), - (0x3312, "M", "キュリー"), - (0x3313, "M", "ギルダー"), - (0x3314, "M", "キロ"), - (0x3315, "M", "キログラム"), - (0x3316, "M", "キロメートル"), - (0x3317, "M", "キロワット"), - (0x3318, "M", "グラム"), - (0x3319, "M", "グラムトン"), - ] - - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x331A, "M", "クルゼイロ"), - (0x331B, "M", "クローネ"), - (0x331C, "M", "ケース"), - (0x331D, "M", "コルナ"), - (0x331E, "M", "コーポ"), - (0x331F, "M", "サイクル"), - (0x3320, "M", "サンチーム"), - (0x3321, "M", "シリング"), - (0x3322, "M", "センチ"), - (0x3323, "M", "セント"), - (0x3324, "M", "ダース"), - (0x3325, "M", "デシ"), - (0x3326, "M", "ドル"), - (0x3327, "M", "トン"), - (0x3328, "M", "ナノ"), - (0x3329, "M", "ノット"), - (0x332A, "M", "ハイツ"), - (0x332B, "M", "パーセント"), - (0x332C, "M", "パーツ"), - (0x332D, "M", "バーレル"), - (0x332E, "M", "ピアストル"), - (0x332F, "M", "ピクル"), - (0x3330, "M", "ピコ"), - (0x3331, "M", "ビル"), - (0x3332, "M", "ファラッド"), - (0x3333, "M", "フィート"), - (0x3334, "M", "ブッシェル"), - (0x3335, "M", "フラン"), - (0x3336, "M", "ヘクタール"), - (0x3337, "M", "ペソ"), - (0x3338, "M", "ペニヒ"), - 
(0x3339, "M", "ヘルツ"), - (0x333A, "M", "ペンス"), - (0x333B, "M", "ページ"), - (0x333C, "M", "ベータ"), - (0x333D, "M", "ポイント"), - (0x333E, "M", "ボルト"), - (0x333F, "M", "ホン"), - (0x3340, "M", "ポンド"), - (0x3341, "M", "ホール"), - (0x3342, "M", "ホーン"), - (0x3343, "M", "マイクロ"), - (0x3344, "M", "マイル"), - (0x3345, "M", "マッハ"), - (0x3346, "M", "マルク"), - (0x3347, "M", "マンション"), - (0x3348, "M", "ミクロン"), - (0x3349, "M", "ミリ"), - (0x334A, "M", "ミリバール"), - (0x334B, "M", "メガ"), - (0x334C, "M", "メガトン"), - (0x334D, "M", "メートル"), - (0x334E, "M", "ヤード"), - (0x334F, "M", "ヤール"), - (0x3350, "M", "ユアン"), - (0x3351, "M", "リットル"), - (0x3352, "M", "リラ"), - (0x3353, "M", "ルピー"), - (0x3354, "M", "ルーブル"), - (0x3355, "M", "レム"), - (0x3356, "M", "レントゲン"), - (0x3357, "M", "ワット"), - (0x3358, "M", "0点"), - (0x3359, "M", "1点"), - (0x335A, "M", "2点"), - (0x335B, "M", "3点"), - (0x335C, "M", "4点"), - (0x335D, "M", "5点"), - (0x335E, "M", "6点"), - (0x335F, "M", "7点"), - (0x3360, "M", "8点"), - (0x3361, "M", "9点"), - (0x3362, "M", "10点"), - (0x3363, "M", "11点"), - (0x3364, "M", "12点"), - (0x3365, "M", "13点"), - (0x3366, "M", "14点"), - (0x3367, "M", "15点"), - (0x3368, "M", "16点"), - (0x3369, "M", "17点"), - (0x336A, "M", "18点"), - (0x336B, "M", "19点"), - (0x336C, "M", "20点"), - (0x336D, "M", "21点"), - (0x336E, "M", "22点"), - (0x336F, "M", "23点"), - (0x3370, "M", "24点"), - (0x3371, "M", "hpa"), - (0x3372, "M", "da"), - (0x3373, "M", "au"), - (0x3374, "M", "bar"), - (0x3375, "M", "ov"), - (0x3376, "M", "pc"), - (0x3377, "M", "dm"), - (0x3378, "M", "dm2"), - (0x3379, "M", "dm3"), - (0x337A, "M", "iu"), - (0x337B, "M", "平成"), - (0x337C, "M", "昭和"), - (0x337D, "M", "大正"), - ] - - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x337E, "M", "明治"), - (0x337F, "M", "株式会社"), - (0x3380, "M", "pa"), - (0x3381, "M", "na"), - (0x3382, "M", "μa"), - (0x3383, "M", "ma"), - (0x3384, "M", "ka"), - (0x3385, "M", "kb"), - (0x3386, "M", "mb"), - (0x3387, "M", "gb"), - (0x3388, "M", "cal"), - (0x3389, "M", "kcal"), - (0x338A, "M", "pf"), - (0x338B, "M", "nf"), - (0x338C, "M", "μf"), - (0x338D, "M", "μg"), - (0x338E, "M", "mg"), - (0x338F, "M", "kg"), - (0x3390, "M", "hz"), - (0x3391, "M", "khz"), - (0x3392, "M", "mhz"), - (0x3393, "M", "ghz"), - (0x3394, "M", "thz"), - (0x3395, "M", "μl"), - (0x3396, "M", "ml"), - (0x3397, "M", "dl"), - (0x3398, "M", "kl"), - (0x3399, "M", "fm"), - (0x339A, "M", "nm"), - (0x339B, "M", "μm"), - (0x339C, "M", "mm"), - (0x339D, "M", "cm"), - (0x339E, "M", "km"), - (0x339F, "M", "mm2"), - (0x33A0, "M", "cm2"), - (0x33A1, "M", "m2"), - (0x33A2, "M", "km2"), - (0x33A3, "M", "mm3"), - (0x33A4, "M", "cm3"), - (0x33A5, "M", "m3"), - (0x33A6, "M", "km3"), - (0x33A7, "M", "m∕s"), - (0x33A8, "M", "m∕s2"), - (0x33A9, "M", "pa"), - (0x33AA, "M", "kpa"), - (0x33AB, "M", "mpa"), - (0x33AC, "M", "gpa"), - (0x33AD, "M", "rad"), - (0x33AE, "M", "rad∕s"), - (0x33AF, "M", "rad∕s2"), - (0x33B0, "M", "ps"), - (0x33B1, "M", "ns"), - (0x33B2, "M", "μs"), - (0x33B3, "M", "ms"), - (0x33B4, "M", "pv"), - (0x33B5, "M", "nv"), - (0x33B6, "M", "μv"), - (0x33B7, "M", "mv"), - (0x33B8, "M", "kv"), - (0x33B9, "M", "mv"), - (0x33BA, "M", "pw"), - (0x33BB, "M", "nw"), - (0x33BC, "M", "μw"), - (0x33BD, "M", "mw"), - (0x33BE, "M", "kw"), - (0x33BF, "M", "mw"), - (0x33C0, "M", "kω"), - (0x33C1, "M", "mω"), - (0x33C2, "X"), - (0x33C3, "M", "bq"), - (0x33C4, "M", "cc"), - (0x33C5, "M", "cd"), - (0x33C6, "M", "c∕kg"), - (0x33C7, "X"), - (0x33C8, "M", "db"), - (0x33C9, "M", "gy"), - (0x33CA, "M", "ha"), - (0x33CB, "M", "hp"), - 
(0x33CC, "M", "in"), - (0x33CD, "M", "kk"), - (0x33CE, "M", "km"), - (0x33CF, "M", "kt"), - (0x33D0, "M", "lm"), - (0x33D1, "M", "ln"), - (0x33D2, "M", "log"), - (0x33D3, "M", "lx"), - (0x33D4, "M", "mb"), - (0x33D5, "M", "mil"), - (0x33D6, "M", "mol"), - (0x33D7, "M", "ph"), - (0x33D8, "X"), - (0x33D9, "M", "ppm"), - (0x33DA, "M", "pr"), - (0x33DB, "M", "sr"), - (0x33DC, "M", "sv"), - (0x33DD, "M", "wb"), - (0x33DE, "M", "v∕m"), - (0x33DF, "M", "a∕m"), - (0x33E0, "M", "1日"), - (0x33E1, "M", "2日"), - ] - - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x33E2, "M", "3日"), - (0x33E3, "M", "4日"), - (0x33E4, "M", "5日"), - (0x33E5, "M", "6日"), - (0x33E6, "M", "7日"), - (0x33E7, "M", "8日"), - (0x33E8, "M", "9日"), - (0x33E9, "M", "10日"), - (0x33EA, "M", "11日"), - (0x33EB, "M", "12日"), - (0x33EC, "M", "13日"), - (0x33ED, "M", "14日"), - (0x33EE, "M", "15日"), - (0x33EF, "M", "16日"), - (0x33F0, "M", "17日"), - (0x33F1, "M", "18日"), - (0x33F2, "M", "19日"), - (0x33F3, "M", "20日"), - (0x33F4, "M", "21日"), - (0x33F5, "M", "22日"), - (0x33F6, "M", "23日"), - (0x33F7, "M", "24日"), - (0x33F8, "M", "25日"), - (0x33F9, "M", "26日"), - (0x33FA, "M", "27日"), - (0x33FB, "M", "28日"), - (0x33FC, "M", "29日"), - (0x33FD, "M", "30日"), - (0x33FE, "M", "31日"), - (0x33FF, "M", "gal"), - (0x3400, "V"), - (0xA48D, "X"), - (0xA490, "V"), - (0xA4C7, "X"), - (0xA4D0, "V"), - (0xA62C, "X"), - (0xA640, "M", "ꙁ"), - (0xA641, "V"), - (0xA642, "M", "ꙃ"), - (0xA643, "V"), - (0xA644, "M", "ꙅ"), - (0xA645, "V"), - (0xA646, "M", "ꙇ"), - (0xA647, "V"), - (0xA648, "M", "ꙉ"), - (0xA649, "V"), - (0xA64A, "M", "ꙋ"), - (0xA64B, "V"), - (0xA64C, "M", "ꙍ"), - (0xA64D, "V"), - (0xA64E, "M", "ꙏ"), - (0xA64F, "V"), - (0xA650, "M", "ꙑ"), - (0xA651, "V"), - (0xA652, "M", "ꙓ"), - (0xA653, "V"), - (0xA654, "M", "ꙕ"), - (0xA655, "V"), - (0xA656, "M", "ꙗ"), - (0xA657, "V"), - (0xA658, "M", "ꙙ"), - (0xA659, "V"), - (0xA65A, "M", "ꙛ"), - (0xA65B, "V"), - (0xA65C, "M", "ꙝ"), - (0xA65D, "V"), - (0xA65E, "M", "ꙟ"), - (0xA65F, "V"), - (0xA660, "M", "ꙡ"), - (0xA661, "V"), - (0xA662, "M", "ꙣ"), - (0xA663, "V"), - (0xA664, "M", "ꙥ"), - (0xA665, "V"), - (0xA666, "M", "ꙧ"), - (0xA667, "V"), - (0xA668, "M", "ꙩ"), - (0xA669, "V"), - (0xA66A, "M", "ꙫ"), - (0xA66B, "V"), - (0xA66C, "M", "ꙭ"), - (0xA66D, "V"), - (0xA680, "M", "ꚁ"), - (0xA681, "V"), - (0xA682, "M", "ꚃ"), - (0xA683, "V"), - (0xA684, "M", "ꚅ"), - (0xA685, "V"), - (0xA686, "M", "ꚇ"), - (0xA687, "V"), - (0xA688, "M", "ꚉ"), - (0xA689, "V"), - (0xA68A, "M", "ꚋ"), - (0xA68B, "V"), - (0xA68C, "M", "ꚍ"), - (0xA68D, "V"), - (0xA68E, "M", "ꚏ"), - (0xA68F, "V"), - (0xA690, "M", "ꚑ"), - (0xA691, "V"), - ] - - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA692, "M", "ꚓ"), - (0xA693, "V"), - (0xA694, "M", "ꚕ"), - (0xA695, "V"), - (0xA696, "M", "ꚗ"), - (0xA697, "V"), - (0xA698, "M", "ꚙ"), - (0xA699, "V"), - (0xA69A, "M", "ꚛ"), - (0xA69B, "V"), - (0xA69C, "M", "ъ"), - (0xA69D, "M", "ь"), - (0xA69E, "V"), - (0xA6F8, "X"), - (0xA700, "V"), - (0xA722, "M", "ꜣ"), - (0xA723, "V"), - (0xA724, "M", "ꜥ"), - (0xA725, "V"), - (0xA726, "M", "ꜧ"), - (0xA727, "V"), - (0xA728, "M", "ꜩ"), - (0xA729, "V"), - (0xA72A, "M", "ꜫ"), - (0xA72B, "V"), - (0xA72C, "M", "ꜭ"), - (0xA72D, "V"), - (0xA72E, "M", "ꜯ"), - (0xA72F, "V"), - (0xA732, "M", "ꜳ"), - (0xA733, "V"), - (0xA734, "M", "ꜵ"), - (0xA735, "V"), - (0xA736, "M", "ꜷ"), - (0xA737, "V"), - (0xA738, "M", "ꜹ"), - (0xA739, "V"), - (0xA73A, "M", "ꜻ"), - (0xA73B, "V"), - (0xA73C, "M", "ꜽ"), - (0xA73D, "V"), - (0xA73E, 
"M", "ꜿ"), - (0xA73F, "V"), - (0xA740, "M", "ꝁ"), - (0xA741, "V"), - (0xA742, "M", "ꝃ"), - (0xA743, "V"), - (0xA744, "M", "ꝅ"), - (0xA745, "V"), - (0xA746, "M", "ꝇ"), - (0xA747, "V"), - (0xA748, "M", "ꝉ"), - (0xA749, "V"), - (0xA74A, "M", "ꝋ"), - (0xA74B, "V"), - (0xA74C, "M", "ꝍ"), - (0xA74D, "V"), - (0xA74E, "M", "ꝏ"), - (0xA74F, "V"), - (0xA750, "M", "ꝑ"), - (0xA751, "V"), - (0xA752, "M", "ꝓ"), - (0xA753, "V"), - (0xA754, "M", "ꝕ"), - (0xA755, "V"), - (0xA756, "M", "ꝗ"), - (0xA757, "V"), - (0xA758, "M", "ꝙ"), - (0xA759, "V"), - (0xA75A, "M", "ꝛ"), - (0xA75B, "V"), - (0xA75C, "M", "ꝝ"), - (0xA75D, "V"), - (0xA75E, "M", "ꝟ"), - (0xA75F, "V"), - (0xA760, "M", "ꝡ"), - (0xA761, "V"), - (0xA762, "M", "ꝣ"), - (0xA763, "V"), - (0xA764, "M", "ꝥ"), - (0xA765, "V"), - (0xA766, "M", "ꝧ"), - (0xA767, "V"), - (0xA768, "M", "ꝩ"), - (0xA769, "V"), - (0xA76A, "M", "ꝫ"), - (0xA76B, "V"), - (0xA76C, "M", "ꝭ"), - (0xA76D, "V"), - (0xA76E, "M", "ꝯ"), - (0xA76F, "V"), - (0xA770, "M", "ꝯ"), - (0xA771, "V"), - (0xA779, "M", "ꝺ"), - (0xA77A, "V"), - (0xA77B, "M", "ꝼ"), - (0xA77C, "V"), - (0xA77D, "M", "ᵹ"), - (0xA77E, "M", "ꝿ"), - (0xA77F, "V"), - ] - - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA780, "M", "ꞁ"), - (0xA781, "V"), - (0xA782, "M", "ꞃ"), - (0xA783, "V"), - (0xA784, "M", "ꞅ"), - (0xA785, "V"), - (0xA786, "M", "ꞇ"), - (0xA787, "V"), - (0xA78B, "M", "ꞌ"), - (0xA78C, "V"), - (0xA78D, "M", "ɥ"), - (0xA78E, "V"), - (0xA790, "M", "ꞑ"), - (0xA791, "V"), - (0xA792, "M", "ꞓ"), - (0xA793, "V"), - (0xA796, "M", "ꞗ"), - (0xA797, "V"), - (0xA798, "M", "ꞙ"), - (0xA799, "V"), - (0xA79A, "M", "ꞛ"), - (0xA79B, "V"), - (0xA79C, "M", "ꞝ"), - (0xA79D, "V"), - (0xA79E, "M", "ꞟ"), - (0xA79F, "V"), - (0xA7A0, "M", "ꞡ"), - (0xA7A1, "V"), - (0xA7A2, "M", "ꞣ"), - (0xA7A3, "V"), - (0xA7A4, "M", "ꞥ"), - (0xA7A5, "V"), - (0xA7A6, "M", "ꞧ"), - (0xA7A7, "V"), - (0xA7A8, "M", "ꞩ"), - (0xA7A9, "V"), - (0xA7AA, "M", "ɦ"), - (0xA7AB, "M", "ɜ"), - (0xA7AC, "M", "ɡ"), - (0xA7AD, "M", "ɬ"), - (0xA7AE, "M", "ɪ"), - (0xA7AF, "V"), - (0xA7B0, "M", "ʞ"), - (0xA7B1, "M", "ʇ"), - (0xA7B2, "M", "ʝ"), - (0xA7B3, "M", "ꭓ"), - (0xA7B4, "M", "ꞵ"), - (0xA7B5, "V"), - (0xA7B6, "M", "ꞷ"), - (0xA7B7, "V"), - (0xA7B8, "M", "ꞹ"), - (0xA7B9, "V"), - (0xA7BA, "M", "ꞻ"), - (0xA7BB, "V"), - (0xA7BC, "M", "ꞽ"), - (0xA7BD, "V"), - (0xA7BE, "M", "ꞿ"), - (0xA7BF, "V"), - (0xA7C0, "M", "ꟁ"), - (0xA7C1, "V"), - (0xA7C2, "M", "ꟃ"), - (0xA7C3, "V"), - (0xA7C4, "M", "ꞔ"), - (0xA7C5, "M", "ʂ"), - (0xA7C6, "M", "ᶎ"), - (0xA7C7, "M", "ꟈ"), - (0xA7C8, "V"), - (0xA7C9, "M", "ꟊ"), - (0xA7CA, "V"), - (0xA7CB, "X"), - (0xA7D0, "M", "ꟑ"), - (0xA7D1, "V"), - (0xA7D2, "X"), - (0xA7D3, "V"), - (0xA7D4, "X"), - (0xA7D5, "V"), - (0xA7D6, "M", "ꟗ"), - (0xA7D7, "V"), - (0xA7D8, "M", "ꟙ"), - (0xA7D9, "V"), - (0xA7DA, "X"), - (0xA7F2, "M", "c"), - (0xA7F3, "M", "f"), - (0xA7F4, "M", "q"), - (0xA7F5, "M", "ꟶ"), - (0xA7F6, "V"), - (0xA7F8, "M", "ħ"), - (0xA7F9, "M", "œ"), - (0xA7FA, "V"), - (0xA82D, "X"), - (0xA830, "V"), - (0xA83A, "X"), - (0xA840, "V"), - (0xA878, "X"), - (0xA880, "V"), - (0xA8C6, "X"), - (0xA8CE, "V"), - (0xA8DA, "X"), - (0xA8E0, "V"), - (0xA954, "X"), - ] - - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA95F, "V"), - (0xA97D, "X"), - (0xA980, "V"), - (0xA9CE, "X"), - (0xA9CF, "V"), - (0xA9DA, "X"), - (0xA9DE, "V"), - (0xA9FF, "X"), - (0xAA00, "V"), - (0xAA37, "X"), - (0xAA40, "V"), - (0xAA4E, "X"), - (0xAA50, "V"), - (0xAA5A, "X"), - (0xAA5C, "V"), - (0xAAC3, "X"), - (0xAADB, "V"), 
- (0xAAF7, "X"), - (0xAB01, "V"), - (0xAB07, "X"), - (0xAB09, "V"), - (0xAB0F, "X"), - (0xAB11, "V"), - (0xAB17, "X"), - (0xAB20, "V"), - (0xAB27, "X"), - (0xAB28, "V"), - (0xAB2F, "X"), - (0xAB30, "V"), - (0xAB5C, "M", "ꜧ"), - (0xAB5D, "M", "ꬷ"), - (0xAB5E, "M", "ɫ"), - (0xAB5F, "M", "ꭒ"), - (0xAB60, "V"), - (0xAB69, "M", "ʍ"), - (0xAB6A, "V"), - (0xAB6C, "X"), - (0xAB70, "M", "Ꭰ"), - (0xAB71, "M", "Ꭱ"), - (0xAB72, "M", "Ꭲ"), - (0xAB73, "M", "Ꭳ"), - (0xAB74, "M", "Ꭴ"), - (0xAB75, "M", "Ꭵ"), - (0xAB76, "M", "Ꭶ"), - (0xAB77, "M", "Ꭷ"), - (0xAB78, "M", "Ꭸ"), - (0xAB79, "M", "Ꭹ"), - (0xAB7A, "M", "Ꭺ"), - (0xAB7B, "M", "Ꭻ"), - (0xAB7C, "M", "Ꭼ"), - (0xAB7D, "M", "Ꭽ"), - (0xAB7E, "M", "Ꭾ"), - (0xAB7F, "M", "Ꭿ"), - (0xAB80, "M", "Ꮀ"), - (0xAB81, "M", "Ꮁ"), - (0xAB82, "M", "Ꮂ"), - (0xAB83, "M", "Ꮃ"), - (0xAB84, "M", "Ꮄ"), - (0xAB85, "M", "Ꮅ"), - (0xAB86, "M", "Ꮆ"), - (0xAB87, "M", "Ꮇ"), - (0xAB88, "M", "Ꮈ"), - (0xAB89, "M", "Ꮉ"), - (0xAB8A, "M", "Ꮊ"), - (0xAB8B, "M", "Ꮋ"), - (0xAB8C, "M", "Ꮌ"), - (0xAB8D, "M", "Ꮍ"), - (0xAB8E, "M", "Ꮎ"), - (0xAB8F, "M", "Ꮏ"), - (0xAB90, "M", "Ꮐ"), - (0xAB91, "M", "Ꮑ"), - (0xAB92, "M", "Ꮒ"), - (0xAB93, "M", "Ꮓ"), - (0xAB94, "M", "Ꮔ"), - (0xAB95, "M", "Ꮕ"), - (0xAB96, "M", "Ꮖ"), - (0xAB97, "M", "Ꮗ"), - (0xAB98, "M", "Ꮘ"), - (0xAB99, "M", "Ꮙ"), - (0xAB9A, "M", "Ꮚ"), - (0xAB9B, "M", "Ꮛ"), - (0xAB9C, "M", "Ꮜ"), - (0xAB9D, "M", "Ꮝ"), - (0xAB9E, "M", "Ꮞ"), - (0xAB9F, "M", "Ꮟ"), - (0xABA0, "M", "Ꮠ"), - (0xABA1, "M", "Ꮡ"), - (0xABA2, "M", "Ꮢ"), - (0xABA3, "M", "Ꮣ"), - (0xABA4, "M", "Ꮤ"), - (0xABA5, "M", "Ꮥ"), - (0xABA6, "M", "Ꮦ"), - (0xABA7, "M", "Ꮧ"), - (0xABA8, "M", "Ꮨ"), - (0xABA9, "M", "Ꮩ"), - (0xABAA, "M", "Ꮪ"), - (0xABAB, "M", "Ꮫ"), - (0xABAC, "M", "Ꮬ"), - (0xABAD, "M", "Ꮭ"), - (0xABAE, "M", "Ꮮ"), - ] - - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xABAF, "M", "Ꮯ"), - (0xABB0, "M", "Ꮰ"), - (0xABB1, "M", "Ꮱ"), - (0xABB2, "M", "Ꮲ"), - (0xABB3, "M", "Ꮳ"), - (0xABB4, "M", "Ꮴ"), - (0xABB5, "M", "Ꮵ"), - (0xABB6, "M", "Ꮶ"), - (0xABB7, "M", "Ꮷ"), - (0xABB8, "M", "Ꮸ"), - (0xABB9, "M", "Ꮹ"), - (0xABBA, "M", "Ꮺ"), - (0xABBB, "M", "Ꮻ"), - (0xABBC, "M", "Ꮼ"), - (0xABBD, "M", "Ꮽ"), - (0xABBE, "M", "Ꮾ"), - (0xABBF, "M", "Ꮿ"), - (0xABC0, "V"), - (0xABEE, "X"), - (0xABF0, "V"), - (0xABFA, "X"), - (0xAC00, "V"), - (0xD7A4, "X"), - (0xD7B0, "V"), - (0xD7C7, "X"), - (0xD7CB, "V"), - (0xD7FC, "X"), - (0xF900, "M", "豈"), - (0xF901, "M", "更"), - (0xF902, "M", "車"), - (0xF903, "M", "賈"), - (0xF904, "M", "滑"), - (0xF905, "M", "串"), - (0xF906, "M", "句"), - (0xF907, "M", "龜"), - (0xF909, "M", "契"), - (0xF90A, "M", "金"), - (0xF90B, "M", "喇"), - (0xF90C, "M", "奈"), - (0xF90D, "M", "懶"), - (0xF90E, "M", "癩"), - (0xF90F, "M", "羅"), - (0xF910, "M", "蘿"), - (0xF911, "M", "螺"), - (0xF912, "M", "裸"), - (0xF913, "M", "邏"), - (0xF914, "M", "樂"), - (0xF915, "M", "洛"), - (0xF916, "M", "烙"), - (0xF917, "M", "珞"), - (0xF918, "M", "落"), - (0xF919, "M", "酪"), - (0xF91A, "M", "駱"), - (0xF91B, "M", "亂"), - (0xF91C, "M", "卵"), - (0xF91D, "M", "欄"), - (0xF91E, "M", "爛"), - (0xF91F, "M", "蘭"), - (0xF920, "M", "鸞"), - (0xF921, "M", "嵐"), - (0xF922, "M", "濫"), - (0xF923, "M", "藍"), - (0xF924, "M", "襤"), - (0xF925, "M", "拉"), - (0xF926, "M", "臘"), - (0xF927, "M", "蠟"), - (0xF928, "M", "廊"), - (0xF929, "M", "朗"), - (0xF92A, "M", "浪"), - (0xF92B, "M", "狼"), - (0xF92C, "M", "郎"), - (0xF92D, "M", "來"), - (0xF92E, "M", "冷"), - (0xF92F, "M", "勞"), - (0xF930, "M", "擄"), - (0xF931, "M", "櫓"), - (0xF932, "M", "爐"), - (0xF933, "M", "盧"), - (0xF934, "M", "老"), - (0xF935, "M", "蘆"), - 
(0xF936, "M", "虜"), - (0xF937, "M", "路"), - (0xF938, "M", "露"), - (0xF939, "M", "魯"), - (0xF93A, "M", "鷺"), - (0xF93B, "M", "碌"), - (0xF93C, "M", "祿"), - (0xF93D, "M", "綠"), - (0xF93E, "M", "菉"), - (0xF93F, "M", "錄"), - (0xF940, "M", "鹿"), - (0xF941, "M", "論"), - (0xF942, "M", "壟"), - (0xF943, "M", "弄"), - (0xF944, "M", "籠"), - (0xF945, "M", "聾"), - (0xF946, "M", "牢"), - (0xF947, "M", "磊"), - (0xF948, "M", "賂"), - (0xF949, "M", "雷"), - ] - - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF94A, "M", "壘"), - (0xF94B, "M", "屢"), - (0xF94C, "M", "樓"), - (0xF94D, "M", "淚"), - (0xF94E, "M", "漏"), - (0xF94F, "M", "累"), - (0xF950, "M", "縷"), - (0xF951, "M", "陋"), - (0xF952, "M", "勒"), - (0xF953, "M", "肋"), - (0xF954, "M", "凜"), - (0xF955, "M", "凌"), - (0xF956, "M", "稜"), - (0xF957, "M", "綾"), - (0xF958, "M", "菱"), - (0xF959, "M", "陵"), - (0xF95A, "M", "讀"), - (0xF95B, "M", "拏"), - (0xF95C, "M", "樂"), - (0xF95D, "M", "諾"), - (0xF95E, "M", "丹"), - (0xF95F, "M", "寧"), - (0xF960, "M", "怒"), - (0xF961, "M", "率"), - (0xF962, "M", "異"), - (0xF963, "M", "北"), - (0xF964, "M", "磻"), - (0xF965, "M", "便"), - (0xF966, "M", "復"), - (0xF967, "M", "不"), - (0xF968, "M", "泌"), - (0xF969, "M", "數"), - (0xF96A, "M", "索"), - (0xF96B, "M", "參"), - (0xF96C, "M", "塞"), - (0xF96D, "M", "省"), - (0xF96E, "M", "葉"), - (0xF96F, "M", "說"), - (0xF970, "M", "殺"), - (0xF971, "M", "辰"), - (0xF972, "M", "沈"), - (0xF973, "M", "拾"), - (0xF974, "M", "若"), - (0xF975, "M", "掠"), - (0xF976, "M", "略"), - (0xF977, "M", "亮"), - (0xF978, "M", "兩"), - (0xF979, "M", "凉"), - (0xF97A, "M", "梁"), - (0xF97B, "M", "糧"), - (0xF97C, "M", "良"), - (0xF97D, "M", "諒"), - (0xF97E, "M", "量"), - (0xF97F, "M", "勵"), - (0xF980, "M", "呂"), - (0xF981, "M", "女"), - (0xF982, "M", "廬"), - (0xF983, "M", "旅"), - (0xF984, "M", "濾"), - (0xF985, "M", "礪"), - (0xF986, "M", "閭"), - (0xF987, "M", "驪"), - (0xF988, "M", "麗"), - (0xF989, "M", "黎"), - (0xF98A, "M", "力"), - (0xF98B, "M", "曆"), - (0xF98C, "M", "歷"), - (0xF98D, "M", "轢"), - (0xF98E, "M", "年"), - (0xF98F, "M", "憐"), - (0xF990, "M", "戀"), - (0xF991, "M", "撚"), - (0xF992, "M", "漣"), - (0xF993, "M", "煉"), - (0xF994, "M", "璉"), - (0xF995, "M", "秊"), - (0xF996, "M", "練"), - (0xF997, "M", "聯"), - (0xF998, "M", "輦"), - (0xF999, "M", "蓮"), - (0xF99A, "M", "連"), - (0xF99B, "M", "鍊"), - (0xF99C, "M", "列"), - (0xF99D, "M", "劣"), - (0xF99E, "M", "咽"), - (0xF99F, "M", "烈"), - (0xF9A0, "M", "裂"), - (0xF9A1, "M", "說"), - (0xF9A2, "M", "廉"), - (0xF9A3, "M", "念"), - (0xF9A4, "M", "捻"), - (0xF9A5, "M", "殮"), - (0xF9A6, "M", "簾"), - (0xF9A7, "M", "獵"), - (0xF9A8, "M", "令"), - (0xF9A9, "M", "囹"), - (0xF9AA, "M", "寧"), - (0xF9AB, "M", "嶺"), - (0xF9AC, "M", "怜"), - (0xF9AD, "M", "玲"), - ] - - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF9AE, "M", "瑩"), - (0xF9AF, "M", "羚"), - (0xF9B0, "M", "聆"), - (0xF9B1, "M", "鈴"), - (0xF9B2, "M", "零"), - (0xF9B3, "M", "靈"), - (0xF9B4, "M", "領"), - (0xF9B5, "M", "例"), - (0xF9B6, "M", "禮"), - (0xF9B7, "M", "醴"), - (0xF9B8, "M", "隸"), - (0xF9B9, "M", "惡"), - (0xF9BA, "M", "了"), - (0xF9BB, "M", "僚"), - (0xF9BC, "M", "寮"), - (0xF9BD, "M", "尿"), - (0xF9BE, "M", "料"), - (0xF9BF, "M", "樂"), - (0xF9C0, "M", "燎"), - (0xF9C1, "M", "療"), - (0xF9C2, "M", "蓼"), - (0xF9C3, "M", "遼"), - (0xF9C4, "M", "龍"), - (0xF9C5, "M", "暈"), - (0xF9C6, "M", "阮"), - (0xF9C7, "M", "劉"), - (0xF9C8, "M", "杻"), - (0xF9C9, "M", "柳"), - (0xF9CA, "M", "流"), - (0xF9CB, "M", "溜"), - (0xF9CC, "M", "琉"), - (0xF9CD, "M", "留"), - (0xF9CE, "M", "硫"), - (0xF9CF, 
"M", "紐"), - (0xF9D0, "M", "類"), - (0xF9D1, "M", "六"), - (0xF9D2, "M", "戮"), - (0xF9D3, "M", "陸"), - (0xF9D4, "M", "倫"), - (0xF9D5, "M", "崙"), - (0xF9D6, "M", "淪"), - (0xF9D7, "M", "輪"), - (0xF9D8, "M", "律"), - (0xF9D9, "M", "慄"), - (0xF9DA, "M", "栗"), - (0xF9DB, "M", "率"), - (0xF9DC, "M", "隆"), - (0xF9DD, "M", "利"), - (0xF9DE, "M", "吏"), - (0xF9DF, "M", "履"), - (0xF9E0, "M", "易"), - (0xF9E1, "M", "李"), - (0xF9E2, "M", "梨"), - (0xF9E3, "M", "泥"), - (0xF9E4, "M", "理"), - (0xF9E5, "M", "痢"), - (0xF9E6, "M", "罹"), - (0xF9E7, "M", "裏"), - (0xF9E8, "M", "裡"), - (0xF9E9, "M", "里"), - (0xF9EA, "M", "離"), - (0xF9EB, "M", "匿"), - (0xF9EC, "M", "溺"), - (0xF9ED, "M", "吝"), - (0xF9EE, "M", "燐"), - (0xF9EF, "M", "璘"), - (0xF9F0, "M", "藺"), - (0xF9F1, "M", "隣"), - (0xF9F2, "M", "鱗"), - (0xF9F3, "M", "麟"), - (0xF9F4, "M", "林"), - (0xF9F5, "M", "淋"), - (0xF9F6, "M", "臨"), - (0xF9F7, "M", "立"), - (0xF9F8, "M", "笠"), - (0xF9F9, "M", "粒"), - (0xF9FA, "M", "狀"), - (0xF9FB, "M", "炙"), - (0xF9FC, "M", "識"), - (0xF9FD, "M", "什"), - (0xF9FE, "M", "茶"), - (0xF9FF, "M", "刺"), - (0xFA00, "M", "切"), - (0xFA01, "M", "度"), - (0xFA02, "M", "拓"), - (0xFA03, "M", "糖"), - (0xFA04, "M", "宅"), - (0xFA05, "M", "洞"), - (0xFA06, "M", "暴"), - (0xFA07, "M", "輻"), - (0xFA08, "M", "行"), - (0xFA09, "M", "降"), - (0xFA0A, "M", "見"), - (0xFA0B, "M", "廓"), - (0xFA0C, "M", "兀"), - (0xFA0D, "M", "嗀"), - (0xFA0E, "V"), - (0xFA10, "M", "塚"), - (0xFA11, "V"), - (0xFA12, "M", "晴"), - ] - - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA13, "V"), - (0xFA15, "M", "凞"), - (0xFA16, "M", "猪"), - (0xFA17, "M", "益"), - (0xFA18, "M", "礼"), - (0xFA19, "M", "神"), - (0xFA1A, "M", "祥"), - (0xFA1B, "M", "福"), - (0xFA1C, "M", "靖"), - (0xFA1D, "M", "精"), - (0xFA1E, "M", "羽"), - (0xFA1F, "V"), - (0xFA20, "M", "蘒"), - (0xFA21, "V"), - (0xFA22, "M", "諸"), - (0xFA23, "V"), - (0xFA25, "M", "逸"), - (0xFA26, "M", "都"), - (0xFA27, "V"), - (0xFA2A, "M", "飯"), - (0xFA2B, "M", "飼"), - (0xFA2C, "M", "館"), - (0xFA2D, "M", "鶴"), - (0xFA2E, "M", "郞"), - (0xFA2F, "M", "隷"), - (0xFA30, "M", "侮"), - (0xFA31, "M", "僧"), - (0xFA32, "M", "免"), - (0xFA33, "M", "勉"), - (0xFA34, "M", "勤"), - (0xFA35, "M", "卑"), - (0xFA36, "M", "喝"), - (0xFA37, "M", "嘆"), - (0xFA38, "M", "器"), - (0xFA39, "M", "塀"), - (0xFA3A, "M", "墨"), - (0xFA3B, "M", "層"), - (0xFA3C, "M", "屮"), - (0xFA3D, "M", "悔"), - (0xFA3E, "M", "慨"), - (0xFA3F, "M", "憎"), - (0xFA40, "M", "懲"), - (0xFA41, "M", "敏"), - (0xFA42, "M", "既"), - (0xFA43, "M", "暑"), - (0xFA44, "M", "梅"), - (0xFA45, "M", "海"), - (0xFA46, "M", "渚"), - (0xFA47, "M", "漢"), - (0xFA48, "M", "煮"), - (0xFA49, "M", "爫"), - (0xFA4A, "M", "琢"), - (0xFA4B, "M", "碑"), - (0xFA4C, "M", "社"), - (0xFA4D, "M", "祉"), - (0xFA4E, "M", "祈"), - (0xFA4F, "M", "祐"), - (0xFA50, "M", "祖"), - (0xFA51, "M", "祝"), - (0xFA52, "M", "禍"), - (0xFA53, "M", "禎"), - (0xFA54, "M", "穀"), - (0xFA55, "M", "突"), - (0xFA56, "M", "節"), - (0xFA57, "M", "練"), - (0xFA58, "M", "縉"), - (0xFA59, "M", "繁"), - (0xFA5A, "M", "署"), - (0xFA5B, "M", "者"), - (0xFA5C, "M", "臭"), - (0xFA5D, "M", "艹"), - (0xFA5F, "M", "著"), - (0xFA60, "M", "褐"), - (0xFA61, "M", "視"), - (0xFA62, "M", "謁"), - (0xFA63, "M", "謹"), - (0xFA64, "M", "賓"), - (0xFA65, "M", "贈"), - (0xFA66, "M", "辶"), - (0xFA67, "M", "逸"), - (0xFA68, "M", "難"), - (0xFA69, "M", "響"), - (0xFA6A, "M", "頻"), - (0xFA6B, "M", "恵"), - (0xFA6C, "M", "𤋮"), - (0xFA6D, "M", "舘"), - (0xFA6E, "X"), - (0xFA70, "M", "並"), - (0xFA71, "M", "况"), - (0xFA72, "M", "全"), - (0xFA73, "M", "侀"), - (0xFA74, "M", "充"), - (0xFA75, "M", 
"冀"), - (0xFA76, "M", "勇"), - (0xFA77, "M", "勺"), - (0xFA78, "M", "喝"), - (0xFA79, "M", "啕"), - (0xFA7A, "M", "喙"), - (0xFA7B, "M", "嗢"), - (0xFA7C, "M", "塚"), - ] - - -def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA7D, "M", "墳"), - (0xFA7E, "M", "奄"), - (0xFA7F, "M", "奔"), - (0xFA80, "M", "婢"), - (0xFA81, "M", "嬨"), - (0xFA82, "M", "廒"), - (0xFA83, "M", "廙"), - (0xFA84, "M", "彩"), - (0xFA85, "M", "徭"), - (0xFA86, "M", "惘"), - (0xFA87, "M", "慎"), - (0xFA88, "M", "愈"), - (0xFA89, "M", "憎"), - (0xFA8A, "M", "慠"), - (0xFA8B, "M", "懲"), - (0xFA8C, "M", "戴"), - (0xFA8D, "M", "揄"), - (0xFA8E, "M", "搜"), - (0xFA8F, "M", "摒"), - (0xFA90, "M", "敖"), - (0xFA91, "M", "晴"), - (0xFA92, "M", "朗"), - (0xFA93, "M", "望"), - (0xFA94, "M", "杖"), - (0xFA95, "M", "歹"), - (0xFA96, "M", "殺"), - (0xFA97, "M", "流"), - (0xFA98, "M", "滛"), - (0xFA99, "M", "滋"), - (0xFA9A, "M", "漢"), - (0xFA9B, "M", "瀞"), - (0xFA9C, "M", "煮"), - (0xFA9D, "M", "瞧"), - (0xFA9E, "M", "爵"), - (0xFA9F, "M", "犯"), - (0xFAA0, "M", "猪"), - (0xFAA1, "M", "瑱"), - (0xFAA2, "M", "甆"), - (0xFAA3, "M", "画"), - (0xFAA4, "M", "瘝"), - (0xFAA5, "M", "瘟"), - (0xFAA6, "M", "益"), - (0xFAA7, "M", "盛"), - (0xFAA8, "M", "直"), - (0xFAA9, "M", "睊"), - (0xFAAA, "M", "着"), - (0xFAAB, "M", "磌"), - (0xFAAC, "M", "窱"), - (0xFAAD, "M", "節"), - (0xFAAE, "M", "类"), - (0xFAAF, "M", "絛"), - (0xFAB0, "M", "練"), - (0xFAB1, "M", "缾"), - (0xFAB2, "M", "者"), - (0xFAB3, "M", "荒"), - (0xFAB4, "M", "華"), - (0xFAB5, "M", "蝹"), - (0xFAB6, "M", "襁"), - (0xFAB7, "M", "覆"), - (0xFAB8, "M", "視"), - (0xFAB9, "M", "調"), - (0xFABA, "M", "諸"), - (0xFABB, "M", "請"), - (0xFABC, "M", "謁"), - (0xFABD, "M", "諾"), - (0xFABE, "M", "諭"), - (0xFABF, "M", "謹"), - (0xFAC0, "M", "變"), - (0xFAC1, "M", "贈"), - (0xFAC2, "M", "輸"), - (0xFAC3, "M", "遲"), - (0xFAC4, "M", "醙"), - (0xFAC5, "M", "鉶"), - (0xFAC6, "M", "陼"), - (0xFAC7, "M", "難"), - (0xFAC8, "M", "靖"), - (0xFAC9, "M", "韛"), - (0xFACA, "M", "響"), - (0xFACB, "M", "頋"), - (0xFACC, "M", "頻"), - (0xFACD, "M", "鬒"), - (0xFACE, "M", "龜"), - (0xFACF, "M", "𢡊"), - (0xFAD0, "M", "𢡄"), - (0xFAD1, "M", "𣏕"), - (0xFAD2, "M", "㮝"), - (0xFAD3, "M", "䀘"), - (0xFAD4, "M", "䀹"), - (0xFAD5, "M", "𥉉"), - (0xFAD6, "M", "𥳐"), - (0xFAD7, "M", "𧻓"), - (0xFAD8, "M", "齃"), - (0xFAD9, "M", "龎"), - (0xFADA, "X"), - (0xFB00, "M", "ff"), - (0xFB01, "M", "fi"), - (0xFB02, "M", "fl"), - (0xFB03, "M", "ffi"), - (0xFB04, "M", "ffl"), - (0xFB05, "M", "st"), - ] - - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFB07, "X"), - (0xFB13, "M", "մն"), - (0xFB14, "M", "մե"), - (0xFB15, "M", "մի"), - (0xFB16, "M", "վն"), - (0xFB17, "M", "մխ"), - (0xFB18, "X"), - (0xFB1D, "M", "יִ"), - (0xFB1E, "V"), - (0xFB1F, "M", "ײַ"), - (0xFB20, "M", "ע"), - (0xFB21, "M", "א"), - (0xFB22, "M", "ד"), - (0xFB23, "M", "ה"), - (0xFB24, "M", "כ"), - (0xFB25, "M", "ל"), - (0xFB26, "M", "ם"), - (0xFB27, "M", "ר"), - (0xFB28, "M", "ת"), - (0xFB29, "3", "+"), - (0xFB2A, "M", "שׁ"), - (0xFB2B, "M", "שׂ"), - (0xFB2C, "M", "שּׁ"), - (0xFB2D, "M", "שּׂ"), - (0xFB2E, "M", "אַ"), - (0xFB2F, "M", "אָ"), - (0xFB30, "M", "אּ"), - (0xFB31, "M", "בּ"), - (0xFB32, "M", "גּ"), - (0xFB33, "M", "דּ"), - (0xFB34, "M", "הּ"), - (0xFB35, "M", "וּ"), - (0xFB36, "M", "זּ"), - (0xFB37, "X"), - (0xFB38, "M", "טּ"), - (0xFB39, "M", "יּ"), - (0xFB3A, "M", "ךּ"), - (0xFB3B, "M", "כּ"), - (0xFB3C, "M", "לּ"), - (0xFB3D, "X"), - (0xFB3E, "M", "מּ"), - (0xFB3F, "X"), - (0xFB40, "M", "נּ"), - (0xFB41, "M", "סּ"), - (0xFB42, "X"), - (0xFB43, "M", "ףּ"), - 
(0xFB44, "M", "פּ"), - (0xFB45, "X"), - (0xFB46, "M", "צּ"), - (0xFB47, "M", "קּ"), - (0xFB48, "M", "רּ"), - (0xFB49, "M", "שּ"), - (0xFB4A, "M", "תּ"), - (0xFB4B, "M", "וֹ"), - (0xFB4C, "M", "בֿ"), - (0xFB4D, "M", "כֿ"), - (0xFB4E, "M", "פֿ"), - (0xFB4F, "M", "אל"), - (0xFB50, "M", "ٱ"), - (0xFB52, "M", "ٻ"), - (0xFB56, "M", "پ"), - (0xFB5A, "M", "ڀ"), - (0xFB5E, "M", "ٺ"), - (0xFB62, "M", "ٿ"), - (0xFB66, "M", "ٹ"), - (0xFB6A, "M", "ڤ"), - (0xFB6E, "M", "ڦ"), - (0xFB72, "M", "ڄ"), - (0xFB76, "M", "ڃ"), - (0xFB7A, "M", "چ"), - (0xFB7E, "M", "ڇ"), - (0xFB82, "M", "ڍ"), - (0xFB84, "M", "ڌ"), - (0xFB86, "M", "ڎ"), - (0xFB88, "M", "ڈ"), - (0xFB8A, "M", "ژ"), - (0xFB8C, "M", "ڑ"), - (0xFB8E, "M", "ک"), - (0xFB92, "M", "گ"), - (0xFB96, "M", "ڳ"), - (0xFB9A, "M", "ڱ"), - (0xFB9E, "M", "ں"), - (0xFBA0, "M", "ڻ"), - (0xFBA4, "M", "ۀ"), - (0xFBA6, "M", "ہ"), - (0xFBAA, "M", "ھ"), - (0xFBAE, "M", "ے"), - (0xFBB0, "M", "ۓ"), - (0xFBB2, "V"), - (0xFBC3, "X"), - (0xFBD3, "M", "ڭ"), - (0xFBD7, "M", "ۇ"), - (0xFBD9, "M", "ۆ"), - (0xFBDB, "M", "ۈ"), - (0xFBDD, "M", "ۇٴ"), - (0xFBDE, "M", "ۋ"), - (0xFBE0, "M", "ۅ"), - (0xFBE2, "M", "ۉ"), - (0xFBE4, "M", "ې"), - (0xFBE8, "M", "ى"), - ] - - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFBEA, "M", "ئا"), - (0xFBEC, "M", "ئە"), - (0xFBEE, "M", "ئو"), - (0xFBF0, "M", "ئۇ"), - (0xFBF2, "M", "ئۆ"), - (0xFBF4, "M", "ئۈ"), - (0xFBF6, "M", "ئې"), - (0xFBF9, "M", "ئى"), - (0xFBFC, "M", "ی"), - (0xFC00, "M", "ئج"), - (0xFC01, "M", "ئح"), - (0xFC02, "M", "ئم"), - (0xFC03, "M", "ئى"), - (0xFC04, "M", "ئي"), - (0xFC05, "M", "بج"), - (0xFC06, "M", "بح"), - (0xFC07, "M", "بخ"), - (0xFC08, "M", "بم"), - (0xFC09, "M", "بى"), - (0xFC0A, "M", "بي"), - (0xFC0B, "M", "تج"), - (0xFC0C, "M", "تح"), - (0xFC0D, "M", "تخ"), - (0xFC0E, "M", "تم"), - (0xFC0F, "M", "تى"), - (0xFC10, "M", "تي"), - (0xFC11, "M", "ثج"), - (0xFC12, "M", "ثم"), - (0xFC13, "M", "ثى"), - (0xFC14, "M", "ثي"), - (0xFC15, "M", "جح"), - (0xFC16, "M", "جم"), - (0xFC17, "M", "حج"), - (0xFC18, "M", "حم"), - (0xFC19, "M", "خج"), - (0xFC1A, "M", "خح"), - (0xFC1B, "M", "خم"), - (0xFC1C, "M", "سج"), - (0xFC1D, "M", "سح"), - (0xFC1E, "M", "سخ"), - (0xFC1F, "M", "سم"), - (0xFC20, "M", "صح"), - (0xFC21, "M", "صم"), - (0xFC22, "M", "ضج"), - (0xFC23, "M", "ضح"), - (0xFC24, "M", "ضخ"), - (0xFC25, "M", "ضم"), - (0xFC26, "M", "طح"), - (0xFC27, "M", "طم"), - (0xFC28, "M", "ظم"), - (0xFC29, "M", "عج"), - (0xFC2A, "M", "عم"), - (0xFC2B, "M", "غج"), - (0xFC2C, "M", "غم"), - (0xFC2D, "M", "فج"), - (0xFC2E, "M", "فح"), - (0xFC2F, "M", "فخ"), - (0xFC30, "M", "فم"), - (0xFC31, "M", "فى"), - (0xFC32, "M", "في"), - (0xFC33, "M", "قح"), - (0xFC34, "M", "قم"), - (0xFC35, "M", "قى"), - (0xFC36, "M", "قي"), - (0xFC37, "M", "كا"), - (0xFC38, "M", "كج"), - (0xFC39, "M", "كح"), - (0xFC3A, "M", "كخ"), - (0xFC3B, "M", "كل"), - (0xFC3C, "M", "كم"), - (0xFC3D, "M", "كى"), - (0xFC3E, "M", "كي"), - (0xFC3F, "M", "لج"), - (0xFC40, "M", "لح"), - (0xFC41, "M", "لخ"), - (0xFC42, "M", "لم"), - (0xFC43, "M", "لى"), - (0xFC44, "M", "لي"), - (0xFC45, "M", "مج"), - (0xFC46, "M", "مح"), - (0xFC47, "M", "مخ"), - (0xFC48, "M", "مم"), - (0xFC49, "M", "مى"), - (0xFC4A, "M", "مي"), - (0xFC4B, "M", "نج"), - (0xFC4C, "M", "نح"), - (0xFC4D, "M", "نخ"), - (0xFC4E, "M", "نم"), - (0xFC4F, "M", "نى"), - (0xFC50, "M", "ني"), - (0xFC51, "M", "هج"), - (0xFC52, "M", "هم"), - (0xFC53, "M", "هى"), - (0xFC54, "M", "هي"), - (0xFC55, "M", "يج"), - (0xFC56, "M", "يح"), - (0xFC57, "M", "يخ"), - (0xFC58, "M", "يم"), - (0xFC59, "M", "يى"), - 
(0xFC5A, "M", "يي"), - ] - - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFC5B, "M", "ذٰ"), - (0xFC5C, "M", "رٰ"), - (0xFC5D, "M", "ىٰ"), - (0xFC5E, "3", " ٌّ"), - (0xFC5F, "3", " ٍّ"), - (0xFC60, "3", " َّ"), - (0xFC61, "3", " ُّ"), - (0xFC62, "3", " ِّ"), - (0xFC63, "3", " ّٰ"), - (0xFC64, "M", "ئر"), - (0xFC65, "M", "ئز"), - (0xFC66, "M", "ئم"), - (0xFC67, "M", "ئن"), - (0xFC68, "M", "ئى"), - (0xFC69, "M", "ئي"), - (0xFC6A, "M", "بر"), - (0xFC6B, "M", "بز"), - (0xFC6C, "M", "بم"), - (0xFC6D, "M", "بن"), - (0xFC6E, "M", "بى"), - (0xFC6F, "M", "بي"), - (0xFC70, "M", "تر"), - (0xFC71, "M", "تز"), - (0xFC72, "M", "تم"), - (0xFC73, "M", "تن"), - (0xFC74, "M", "تى"), - (0xFC75, "M", "تي"), - (0xFC76, "M", "ثر"), - (0xFC77, "M", "ثز"), - (0xFC78, "M", "ثم"), - (0xFC79, "M", "ثن"), - (0xFC7A, "M", "ثى"), - (0xFC7B, "M", "ثي"), - (0xFC7C, "M", "فى"), - (0xFC7D, "M", "في"), - (0xFC7E, "M", "قى"), - (0xFC7F, "M", "قي"), - (0xFC80, "M", "كا"), - (0xFC81, "M", "كل"), - (0xFC82, "M", "كم"), - (0xFC83, "M", "كى"), - (0xFC84, "M", "كي"), - (0xFC85, "M", "لم"), - (0xFC86, "M", "لى"), - (0xFC87, "M", "لي"), - (0xFC88, "M", "ما"), - (0xFC89, "M", "مم"), - (0xFC8A, "M", "نر"), - (0xFC8B, "M", "نز"), - (0xFC8C, "M", "نم"), - (0xFC8D, "M", "نن"), - (0xFC8E, "M", "نى"), - (0xFC8F, "M", "ني"), - (0xFC90, "M", "ىٰ"), - (0xFC91, "M", "ير"), - (0xFC92, "M", "يز"), - (0xFC93, "M", "يم"), - (0xFC94, "M", "ين"), - (0xFC95, "M", "يى"), - (0xFC96, "M", "يي"), - (0xFC97, "M", "ئج"), - (0xFC98, "M", "ئح"), - (0xFC99, "M", "ئخ"), - (0xFC9A, "M", "ئم"), - (0xFC9B, "M", "ئه"), - (0xFC9C, "M", "بج"), - (0xFC9D, "M", "بح"), - (0xFC9E, "M", "بخ"), - (0xFC9F, "M", "بم"), - (0xFCA0, "M", "به"), - (0xFCA1, "M", "تج"), - (0xFCA2, "M", "تح"), - (0xFCA3, "M", "تخ"), - (0xFCA4, "M", "تم"), - (0xFCA5, "M", "ته"), - (0xFCA6, "M", "ثم"), - (0xFCA7, "M", "جح"), - (0xFCA8, "M", "جم"), - (0xFCA9, "M", "حج"), - (0xFCAA, "M", "حم"), - (0xFCAB, "M", "خج"), - (0xFCAC, "M", "خم"), - (0xFCAD, "M", "سج"), - (0xFCAE, "M", "سح"), - (0xFCAF, "M", "سخ"), - (0xFCB0, "M", "سم"), - (0xFCB1, "M", "صح"), - (0xFCB2, "M", "صخ"), - (0xFCB3, "M", "صم"), - (0xFCB4, "M", "ضج"), - (0xFCB5, "M", "ضح"), - (0xFCB6, "M", "ضخ"), - (0xFCB7, "M", "ضم"), - (0xFCB8, "M", "طح"), - (0xFCB9, "M", "ظم"), - (0xFCBA, "M", "عج"), - (0xFCBB, "M", "عم"), - (0xFCBC, "M", "غج"), - (0xFCBD, "M", "غم"), - (0xFCBE, "M", "فج"), - ] - - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFCBF, "M", "فح"), - (0xFCC0, "M", "فخ"), - (0xFCC1, "M", "فم"), - (0xFCC2, "M", "قح"), - (0xFCC3, "M", "قم"), - (0xFCC4, "M", "كج"), - (0xFCC5, "M", "كح"), - (0xFCC6, "M", "كخ"), - (0xFCC7, "M", "كل"), - (0xFCC8, "M", "كم"), - (0xFCC9, "M", "لج"), - (0xFCCA, "M", "لح"), - (0xFCCB, "M", "لخ"), - (0xFCCC, "M", "لم"), - (0xFCCD, "M", "له"), - (0xFCCE, "M", "مج"), - (0xFCCF, "M", "مح"), - (0xFCD0, "M", "مخ"), - (0xFCD1, "M", "مم"), - (0xFCD2, "M", "نج"), - (0xFCD3, "M", "نح"), - (0xFCD4, "M", "نخ"), - (0xFCD5, "M", "نم"), - (0xFCD6, "M", "نه"), - (0xFCD7, "M", "هج"), - (0xFCD8, "M", "هم"), - (0xFCD9, "M", "هٰ"), - (0xFCDA, "M", "يج"), - (0xFCDB, "M", "يح"), - (0xFCDC, "M", "يخ"), - (0xFCDD, "M", "يم"), - (0xFCDE, "M", "يه"), - (0xFCDF, "M", "ئم"), - (0xFCE0, "M", "ئه"), - (0xFCE1, "M", "بم"), - (0xFCE2, "M", "به"), - (0xFCE3, "M", "تم"), - (0xFCE4, "M", "ته"), - (0xFCE5, "M", "ثم"), - (0xFCE6, "M", "ثه"), - (0xFCE7, "M", "سم"), - (0xFCE8, "M", "سه"), - (0xFCE9, "M", "شم"), - (0xFCEA, "M", "شه"), - (0xFCEB, "M", "كل"), - (0xFCEC, 
"M", "كم"), - (0xFCED, "M", "لم"), - (0xFCEE, "M", "نم"), - (0xFCEF, "M", "نه"), - (0xFCF0, "M", "يم"), - (0xFCF1, "M", "يه"), - (0xFCF2, "M", "ـَّ"), - (0xFCF3, "M", "ـُّ"), - (0xFCF4, "M", "ـِّ"), - (0xFCF5, "M", "طى"), - (0xFCF6, "M", "طي"), - (0xFCF7, "M", "عى"), - (0xFCF8, "M", "عي"), - (0xFCF9, "M", "غى"), - (0xFCFA, "M", "غي"), - (0xFCFB, "M", "سى"), - (0xFCFC, "M", "سي"), - (0xFCFD, "M", "شى"), - (0xFCFE, "M", "شي"), - (0xFCFF, "M", "حى"), - (0xFD00, "M", "حي"), - (0xFD01, "M", "جى"), - (0xFD02, "M", "جي"), - (0xFD03, "M", "خى"), - (0xFD04, "M", "خي"), - (0xFD05, "M", "صى"), - (0xFD06, "M", "صي"), - (0xFD07, "M", "ضى"), - (0xFD08, "M", "ضي"), - (0xFD09, "M", "شج"), - (0xFD0A, "M", "شح"), - (0xFD0B, "M", "شخ"), - (0xFD0C, "M", "شم"), - (0xFD0D, "M", "شر"), - (0xFD0E, "M", "سر"), - (0xFD0F, "M", "صر"), - (0xFD10, "M", "ضر"), - (0xFD11, "M", "طى"), - (0xFD12, "M", "طي"), - (0xFD13, "M", "عى"), - (0xFD14, "M", "عي"), - (0xFD15, "M", "غى"), - (0xFD16, "M", "غي"), - (0xFD17, "M", "سى"), - (0xFD18, "M", "سي"), - (0xFD19, "M", "شى"), - (0xFD1A, "M", "شي"), - (0xFD1B, "M", "حى"), - (0xFD1C, "M", "حي"), - (0xFD1D, "M", "جى"), - (0xFD1E, "M", "جي"), - (0xFD1F, "M", "خى"), - (0xFD20, "M", "خي"), - (0xFD21, "M", "صى"), - (0xFD22, "M", "صي"), - ] - - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFD23, "M", "ضى"), - (0xFD24, "M", "ضي"), - (0xFD25, "M", "شج"), - (0xFD26, "M", "شح"), - (0xFD27, "M", "شخ"), - (0xFD28, "M", "شم"), - (0xFD29, "M", "شر"), - (0xFD2A, "M", "سر"), - (0xFD2B, "M", "صر"), - (0xFD2C, "M", "ضر"), - (0xFD2D, "M", "شج"), - (0xFD2E, "M", "شح"), - (0xFD2F, "M", "شخ"), - (0xFD30, "M", "شم"), - (0xFD31, "M", "سه"), - (0xFD32, "M", "شه"), - (0xFD33, "M", "طم"), - (0xFD34, "M", "سج"), - (0xFD35, "M", "سح"), - (0xFD36, "M", "سخ"), - (0xFD37, "M", "شج"), - (0xFD38, "M", "شح"), - (0xFD39, "M", "شخ"), - (0xFD3A, "M", "طم"), - (0xFD3B, "M", "ظم"), - (0xFD3C, "M", "اً"), - (0xFD3E, "V"), - (0xFD50, "M", "تجم"), - (0xFD51, "M", "تحج"), - (0xFD53, "M", "تحم"), - (0xFD54, "M", "تخم"), - (0xFD55, "M", "تمج"), - (0xFD56, "M", "تمح"), - (0xFD57, "M", "تمخ"), - (0xFD58, "M", "جمح"), - (0xFD5A, "M", "حمي"), - (0xFD5B, "M", "حمى"), - (0xFD5C, "M", "سحج"), - (0xFD5D, "M", "سجح"), - (0xFD5E, "M", "سجى"), - (0xFD5F, "M", "سمح"), - (0xFD61, "M", "سمج"), - (0xFD62, "M", "سمم"), - (0xFD64, "M", "صحح"), - (0xFD66, "M", "صمم"), - (0xFD67, "M", "شحم"), - (0xFD69, "M", "شجي"), - (0xFD6A, "M", "شمخ"), - (0xFD6C, "M", "شمم"), - (0xFD6E, "M", "ضحى"), - (0xFD6F, "M", "ضخم"), - (0xFD71, "M", "طمح"), - (0xFD73, "M", "طمم"), - (0xFD74, "M", "طمي"), - (0xFD75, "M", "عجم"), - (0xFD76, "M", "عمم"), - (0xFD78, "M", "عمى"), - (0xFD79, "M", "غمم"), - (0xFD7A, "M", "غمي"), - (0xFD7B, "M", "غمى"), - (0xFD7C, "M", "فخم"), - (0xFD7E, "M", "قمح"), - (0xFD7F, "M", "قمم"), - (0xFD80, "M", "لحم"), - (0xFD81, "M", "لحي"), - (0xFD82, "M", "لحى"), - (0xFD83, "M", "لجج"), - (0xFD85, "M", "لخم"), - (0xFD87, "M", "لمح"), - (0xFD89, "M", "محج"), - (0xFD8A, "M", "محم"), - (0xFD8B, "M", "محي"), - (0xFD8C, "M", "مجح"), - (0xFD8D, "M", "مجم"), - (0xFD8E, "M", "مخج"), - (0xFD8F, "M", "مخم"), - (0xFD90, "X"), - (0xFD92, "M", "مجخ"), - (0xFD93, "M", "همج"), - (0xFD94, "M", "همم"), - (0xFD95, "M", "نحم"), - (0xFD96, "M", "نحى"), - (0xFD97, "M", "نجم"), - (0xFD99, "M", "نجى"), - (0xFD9A, "M", "نمي"), - (0xFD9B, "M", "نمى"), - (0xFD9C, "M", "يمم"), - (0xFD9E, "M", "بخي"), - (0xFD9F, "M", "تجي"), - (0xFDA0, "M", "تجى"), - (0xFDA1, "M", "تخي"), - (0xFDA2, "M", "تخى"), - (0xFDA3, "M", "تمي"), - (0xFDA4, "M", 
"تمى"), - (0xFDA5, "M", "جمي"), - (0xFDA6, "M", "جحى"), - (0xFDA7, "M", "جمى"), - (0xFDA8, "M", "سخى"), - (0xFDA9, "M", "صحي"), - (0xFDAA, "M", "شحي"), - ] - - -def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFDAB, "M", "ضحي"), - (0xFDAC, "M", "لجي"), - (0xFDAD, "M", "لمي"), - (0xFDAE, "M", "يحي"), - (0xFDAF, "M", "يجي"), - (0xFDB0, "M", "يمي"), - (0xFDB1, "M", "ممي"), - (0xFDB2, "M", "قمي"), - (0xFDB3, "M", "نحي"), - (0xFDB4, "M", "قمح"), - (0xFDB5, "M", "لحم"), - (0xFDB6, "M", "عمي"), - (0xFDB7, "M", "كمي"), - (0xFDB8, "M", "نجح"), - (0xFDB9, "M", "مخي"), - (0xFDBA, "M", "لجم"), - (0xFDBB, "M", "كمم"), - (0xFDBC, "M", "لجم"), - (0xFDBD, "M", "نجح"), - (0xFDBE, "M", "جحي"), - (0xFDBF, "M", "حجي"), - (0xFDC0, "M", "مجي"), - (0xFDC1, "M", "فمي"), - (0xFDC2, "M", "بحي"), - (0xFDC3, "M", "كمم"), - (0xFDC4, "M", "عجم"), - (0xFDC5, "M", "صمم"), - (0xFDC6, "M", "سخي"), - (0xFDC7, "M", "نجي"), - (0xFDC8, "X"), - (0xFDCF, "V"), - (0xFDD0, "X"), - (0xFDF0, "M", "صلے"), - (0xFDF1, "M", "قلے"), - (0xFDF2, "M", "الله"), - (0xFDF3, "M", "اكبر"), - (0xFDF4, "M", "محمد"), - (0xFDF5, "M", "صلعم"), - (0xFDF6, "M", "رسول"), - (0xFDF7, "M", "عليه"), - (0xFDF8, "M", "وسلم"), - (0xFDF9, "M", "صلى"), - (0xFDFA, "3", "صلى الله عليه وسلم"), - (0xFDFB, "3", "جل جلاله"), - (0xFDFC, "M", "ریال"), - (0xFDFD, "V"), - (0xFE00, "I"), - (0xFE10, "3", ","), - (0xFE11, "M", "、"), - (0xFE12, "X"), - (0xFE13, "3", ":"), - (0xFE14, "3", ";"), - (0xFE15, "3", "!"), - (0xFE16, "3", "?"), - (0xFE17, "M", "〖"), - (0xFE18, "M", "〗"), - (0xFE19, "X"), - (0xFE20, "V"), - (0xFE30, "X"), - (0xFE31, "M", "—"), - (0xFE32, "M", "–"), - (0xFE33, "3", "_"), - (0xFE35, "3", "("), - (0xFE36, "3", ")"), - (0xFE37, "3", "{"), - (0xFE38, "3", "}"), - (0xFE39, "M", "〔"), - (0xFE3A, "M", "〕"), - (0xFE3B, "M", "【"), - (0xFE3C, "M", "】"), - (0xFE3D, "M", "《"), - (0xFE3E, "M", "》"), - (0xFE3F, "M", "〈"), - (0xFE40, "M", "〉"), - (0xFE41, "M", "「"), - (0xFE42, "M", "」"), - (0xFE43, "M", "『"), - (0xFE44, "M", "』"), - (0xFE45, "V"), - (0xFE47, "3", "["), - (0xFE48, "3", "]"), - (0xFE49, "3", " ̅"), - (0xFE4D, "3", "_"), - (0xFE50, "3", ","), - (0xFE51, "M", "、"), - (0xFE52, "X"), - (0xFE54, "3", ";"), - (0xFE55, "3", ":"), - (0xFE56, "3", "?"), - (0xFE57, "3", "!"), - (0xFE58, "M", "—"), - (0xFE59, "3", "("), - (0xFE5A, "3", ")"), - (0xFE5B, "3", "{"), - (0xFE5C, "3", "}"), - (0xFE5D, "M", "〔"), - (0xFE5E, "M", "〕"), - (0xFE5F, "3", "#"), - (0xFE60, "3", "&"), - (0xFE61, "3", "*"), - ] - - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFE62, "3", "+"), - (0xFE63, "M", "-"), - (0xFE64, "3", "<"), - (0xFE65, "3", ">"), - (0xFE66, "3", "="), - (0xFE67, "X"), - (0xFE68, "3", "\\"), - (0xFE69, "3", "$"), - (0xFE6A, "3", "%"), - (0xFE6B, "3", "@"), - (0xFE6C, "X"), - (0xFE70, "3", " ً"), - (0xFE71, "M", "ـً"), - (0xFE72, "3", " ٌ"), - (0xFE73, "V"), - (0xFE74, "3", " ٍ"), - (0xFE75, "X"), - (0xFE76, "3", " َ"), - (0xFE77, "M", "ـَ"), - (0xFE78, "3", " ُ"), - (0xFE79, "M", "ـُ"), - (0xFE7A, "3", " ِ"), - (0xFE7B, "M", "ـِ"), - (0xFE7C, "3", " ّ"), - (0xFE7D, "M", "ـّ"), - (0xFE7E, "3", " ْ"), - (0xFE7F, "M", "ـْ"), - (0xFE80, "M", "ء"), - (0xFE81, "M", "آ"), - (0xFE83, "M", "أ"), - (0xFE85, "M", "ؤ"), - (0xFE87, "M", "إ"), - (0xFE89, "M", "ئ"), - (0xFE8D, "M", "ا"), - (0xFE8F, "M", "ب"), - (0xFE93, "M", "ة"), - (0xFE95, "M", "ت"), - (0xFE99, "M", "ث"), - (0xFE9D, "M", "ج"), - (0xFEA1, "M", "ح"), - (0xFEA5, "M", "خ"), - (0xFEA9, "M", "د"), - (0xFEAB, "M", "ذ"), - (0xFEAD, "M", "ر"), - 
(0xFEAF, "M", "ز"), - (0xFEB1, "M", "س"), - (0xFEB5, "M", "ش"), - (0xFEB9, "M", "ص"), - (0xFEBD, "M", "ض"), - (0xFEC1, "M", "ط"), - (0xFEC5, "M", "ظ"), - (0xFEC9, "M", "ع"), - (0xFECD, "M", "غ"), - (0xFED1, "M", "ف"), - (0xFED5, "M", "ق"), - (0xFED9, "M", "ك"), - (0xFEDD, "M", "ل"), - (0xFEE1, "M", "م"), - (0xFEE5, "M", "ن"), - (0xFEE9, "M", "ه"), - (0xFEED, "M", "و"), - (0xFEEF, "M", "ى"), - (0xFEF1, "M", "ي"), - (0xFEF5, "M", "لآ"), - (0xFEF7, "M", "لأ"), - (0xFEF9, "M", "لإ"), - (0xFEFB, "M", "لا"), - (0xFEFD, "X"), - (0xFEFF, "I"), - (0xFF00, "X"), - (0xFF01, "3", "!"), - (0xFF02, "3", '"'), - (0xFF03, "3", "#"), - (0xFF04, "3", "$"), - (0xFF05, "3", "%"), - (0xFF06, "3", "&"), - (0xFF07, "3", "'"), - (0xFF08, "3", "("), - (0xFF09, "3", ")"), - (0xFF0A, "3", "*"), - (0xFF0B, "3", "+"), - (0xFF0C, "3", ","), - (0xFF0D, "M", "-"), - (0xFF0E, "M", "."), - (0xFF0F, "3", "/"), - (0xFF10, "M", "0"), - (0xFF11, "M", "1"), - (0xFF12, "M", "2"), - (0xFF13, "M", "3"), - (0xFF14, "M", "4"), - (0xFF15, "M", "5"), - (0xFF16, "M", "6"), - (0xFF17, "M", "7"), - (0xFF18, "M", "8"), - (0xFF19, "M", "9"), - (0xFF1A, "3", ":"), - (0xFF1B, "3", ";"), - (0xFF1C, "3", "<"), - (0xFF1D, "3", "="), - (0xFF1E, "3", ">"), - ] - - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF1F, "3", "?"), - (0xFF20, "3", "@"), - (0xFF21, "M", "a"), - (0xFF22, "M", "b"), - (0xFF23, "M", "c"), - (0xFF24, "M", "d"), - (0xFF25, "M", "e"), - (0xFF26, "M", "f"), - (0xFF27, "M", "g"), - (0xFF28, "M", "h"), - (0xFF29, "M", "i"), - (0xFF2A, "M", "j"), - (0xFF2B, "M", "k"), - (0xFF2C, "M", "l"), - (0xFF2D, "M", "m"), - (0xFF2E, "M", "n"), - (0xFF2F, "M", "o"), - (0xFF30, "M", "p"), - (0xFF31, "M", "q"), - (0xFF32, "M", "r"), - (0xFF33, "M", "s"), - (0xFF34, "M", "t"), - (0xFF35, "M", "u"), - (0xFF36, "M", "v"), - (0xFF37, "M", "w"), - (0xFF38, "M", "x"), - (0xFF39, "M", "y"), - (0xFF3A, "M", "z"), - (0xFF3B, "3", "["), - (0xFF3C, "3", "\\"), - (0xFF3D, "3", "]"), - (0xFF3E, "3", "^"), - (0xFF3F, "3", "_"), - (0xFF40, "3", "`"), - (0xFF41, "M", "a"), - (0xFF42, "M", "b"), - (0xFF43, "M", "c"), - (0xFF44, "M", "d"), - (0xFF45, "M", "e"), - (0xFF46, "M", "f"), - (0xFF47, "M", "g"), - (0xFF48, "M", "h"), - (0xFF49, "M", "i"), - (0xFF4A, "M", "j"), - (0xFF4B, "M", "k"), - (0xFF4C, "M", "l"), - (0xFF4D, "M", "m"), - (0xFF4E, "M", "n"), - (0xFF4F, "M", "o"), - (0xFF50, "M", "p"), - (0xFF51, "M", "q"), - (0xFF52, "M", "r"), - (0xFF53, "M", "s"), - (0xFF54, "M", "t"), - (0xFF55, "M", "u"), - (0xFF56, "M", "v"), - (0xFF57, "M", "w"), - (0xFF58, "M", "x"), - (0xFF59, "M", "y"), - (0xFF5A, "M", "z"), - (0xFF5B, "3", "{"), - (0xFF5C, "3", "|"), - (0xFF5D, "3", "}"), - (0xFF5E, "3", "~"), - (0xFF5F, "M", "⦅"), - (0xFF60, "M", "⦆"), - (0xFF61, "M", "."), - (0xFF62, "M", "「"), - (0xFF63, "M", "」"), - (0xFF64, "M", "、"), - (0xFF65, "M", "・"), - (0xFF66, "M", "ヲ"), - (0xFF67, "M", "ァ"), - (0xFF68, "M", "ィ"), - (0xFF69, "M", "ゥ"), - (0xFF6A, "M", "ェ"), - (0xFF6B, "M", "ォ"), - (0xFF6C, "M", "ャ"), - (0xFF6D, "M", "ュ"), - (0xFF6E, "M", "ョ"), - (0xFF6F, "M", "ッ"), - (0xFF70, "M", "ー"), - (0xFF71, "M", "ア"), - (0xFF72, "M", "イ"), - (0xFF73, "M", "ウ"), - (0xFF74, "M", "エ"), - (0xFF75, "M", "オ"), - (0xFF76, "M", "カ"), - (0xFF77, "M", "キ"), - (0xFF78, "M", "ク"), - (0xFF79, "M", "ケ"), - (0xFF7A, "M", "コ"), - (0xFF7B, "M", "サ"), - (0xFF7C, "M", "シ"), - (0xFF7D, "M", "ス"), - (0xFF7E, "M", "セ"), - (0xFF7F, "M", "ソ"), - (0xFF80, "M", "タ"), - (0xFF81, "M", "チ"), - (0xFF82, "M", "ツ"), - ] - - -def _seg_52() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF83, "M", "テ"), - (0xFF84, "M", "ト"), - (0xFF85, "M", "ナ"), - (0xFF86, "M", "ニ"), - (0xFF87, "M", "ヌ"), - (0xFF88, "M", "ネ"), - (0xFF89, "M", "ノ"), - (0xFF8A, "M", "ハ"), - (0xFF8B, "M", "ヒ"), - (0xFF8C, "M", "フ"), - (0xFF8D, "M", "ヘ"), - (0xFF8E, "M", "ホ"), - (0xFF8F, "M", "マ"), - (0xFF90, "M", "ミ"), - (0xFF91, "M", "ム"), - (0xFF92, "M", "メ"), - (0xFF93, "M", "モ"), - (0xFF94, "M", "ヤ"), - (0xFF95, "M", "ユ"), - (0xFF96, "M", "ヨ"), - (0xFF97, "M", "ラ"), - (0xFF98, "M", "リ"), - (0xFF99, "M", "ル"), - (0xFF9A, "M", "レ"), - (0xFF9B, "M", "ロ"), - (0xFF9C, "M", "ワ"), - (0xFF9D, "M", "ン"), - (0xFF9E, "M", "゙"), - (0xFF9F, "M", "゚"), - (0xFFA0, "X"), - (0xFFA1, "M", "ᄀ"), - (0xFFA2, "M", "ᄁ"), - (0xFFA3, "M", "ᆪ"), - (0xFFA4, "M", "ᄂ"), - (0xFFA5, "M", "ᆬ"), - (0xFFA6, "M", "ᆭ"), - (0xFFA7, "M", "ᄃ"), - (0xFFA8, "M", "ᄄ"), - (0xFFA9, "M", "ᄅ"), - (0xFFAA, "M", "ᆰ"), - (0xFFAB, "M", "ᆱ"), - (0xFFAC, "M", "ᆲ"), - (0xFFAD, "M", "ᆳ"), - (0xFFAE, "M", "ᆴ"), - (0xFFAF, "M", "ᆵ"), - (0xFFB0, "M", "ᄚ"), - (0xFFB1, "M", "ᄆ"), - (0xFFB2, "M", "ᄇ"), - (0xFFB3, "M", "ᄈ"), - (0xFFB4, "M", "ᄡ"), - (0xFFB5, "M", "ᄉ"), - (0xFFB6, "M", "ᄊ"), - (0xFFB7, "M", "ᄋ"), - (0xFFB8, "M", "ᄌ"), - (0xFFB9, "M", "ᄍ"), - (0xFFBA, "M", "ᄎ"), - (0xFFBB, "M", "ᄏ"), - (0xFFBC, "M", "ᄐ"), - (0xFFBD, "M", "ᄑ"), - (0xFFBE, "M", "ᄒ"), - (0xFFBF, "X"), - (0xFFC2, "M", "ᅡ"), - (0xFFC3, "M", "ᅢ"), - (0xFFC4, "M", "ᅣ"), - (0xFFC5, "M", "ᅤ"), - (0xFFC6, "M", "ᅥ"), - (0xFFC7, "M", "ᅦ"), - (0xFFC8, "X"), - (0xFFCA, "M", "ᅧ"), - (0xFFCB, "M", "ᅨ"), - (0xFFCC, "M", "ᅩ"), - (0xFFCD, "M", "ᅪ"), - (0xFFCE, "M", "ᅫ"), - (0xFFCF, "M", "ᅬ"), - (0xFFD0, "X"), - (0xFFD2, "M", "ᅭ"), - (0xFFD3, "M", "ᅮ"), - (0xFFD4, "M", "ᅯ"), - (0xFFD5, "M", "ᅰ"), - (0xFFD6, "M", "ᅱ"), - (0xFFD7, "M", "ᅲ"), - (0xFFD8, "X"), - (0xFFDA, "M", "ᅳ"), - (0xFFDB, "M", "ᅴ"), - (0xFFDC, "M", "ᅵ"), - (0xFFDD, "X"), - (0xFFE0, "M", "¢"), - (0xFFE1, "M", "£"), - (0xFFE2, "M", "¬"), - (0xFFE3, "3", " ̄"), - (0xFFE4, "M", "¦"), - (0xFFE5, "M", "¥"), - (0xFFE6, "M", "₩"), - (0xFFE7, "X"), - (0xFFE8, "M", "│"), - (0xFFE9, "M", "←"), - (0xFFEA, "M", "↑"), - (0xFFEB, "M", "→"), - (0xFFEC, "M", "↓"), - (0xFFED, "M", "■"), - ] - - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFFEE, "M", "○"), - (0xFFEF, "X"), - (0x10000, "V"), - (0x1000C, "X"), - (0x1000D, "V"), - (0x10027, "X"), - (0x10028, "V"), - (0x1003B, "X"), - (0x1003C, "V"), - (0x1003E, "X"), - (0x1003F, "V"), - (0x1004E, "X"), - (0x10050, "V"), - (0x1005E, "X"), - (0x10080, "V"), - (0x100FB, "X"), - (0x10100, "V"), - (0x10103, "X"), - (0x10107, "V"), - (0x10134, "X"), - (0x10137, "V"), - (0x1018F, "X"), - (0x10190, "V"), - (0x1019D, "X"), - (0x101A0, "V"), - (0x101A1, "X"), - (0x101D0, "V"), - (0x101FE, "X"), - (0x10280, "V"), - (0x1029D, "X"), - (0x102A0, "V"), - (0x102D1, "X"), - (0x102E0, "V"), - (0x102FC, "X"), - (0x10300, "V"), - (0x10324, "X"), - (0x1032D, "V"), - (0x1034B, "X"), - (0x10350, "V"), - (0x1037B, "X"), - (0x10380, "V"), - (0x1039E, "X"), - (0x1039F, "V"), - (0x103C4, "X"), - (0x103C8, "V"), - (0x103D6, "X"), - (0x10400, "M", "𐐨"), - (0x10401, "M", "𐐩"), - (0x10402, "M", "𐐪"), - (0x10403, "M", "𐐫"), - (0x10404, "M", "𐐬"), - (0x10405, "M", "𐐭"), - (0x10406, "M", "𐐮"), - (0x10407, "M", "𐐯"), - (0x10408, "M", "𐐰"), - (0x10409, "M", "𐐱"), - (0x1040A, "M", "𐐲"), - (0x1040B, "M", "𐐳"), - (0x1040C, "M", "𐐴"), - (0x1040D, "M", "𐐵"), - (0x1040E, "M", "𐐶"), - (0x1040F, "M", "𐐷"), - (0x10410, "M", "𐐸"), - (0x10411, 
"M", "𐐹"), - (0x10412, "M", "𐐺"), - (0x10413, "M", "𐐻"), - (0x10414, "M", "𐐼"), - (0x10415, "M", "𐐽"), - (0x10416, "M", "𐐾"), - (0x10417, "M", "𐐿"), - (0x10418, "M", "𐑀"), - (0x10419, "M", "𐑁"), - (0x1041A, "M", "𐑂"), - (0x1041B, "M", "𐑃"), - (0x1041C, "M", "𐑄"), - (0x1041D, "M", "𐑅"), - (0x1041E, "M", "𐑆"), - (0x1041F, "M", "𐑇"), - (0x10420, "M", "𐑈"), - (0x10421, "M", "𐑉"), - (0x10422, "M", "𐑊"), - (0x10423, "M", "𐑋"), - (0x10424, "M", "𐑌"), - (0x10425, "M", "𐑍"), - (0x10426, "M", "𐑎"), - (0x10427, "M", "𐑏"), - (0x10428, "V"), - (0x1049E, "X"), - (0x104A0, "V"), - (0x104AA, "X"), - (0x104B0, "M", "𐓘"), - (0x104B1, "M", "𐓙"), - (0x104B2, "M", "𐓚"), - (0x104B3, "M", "𐓛"), - (0x104B4, "M", "𐓜"), - (0x104B5, "M", "𐓝"), - (0x104B6, "M", "𐓞"), - (0x104B7, "M", "𐓟"), - (0x104B8, "M", "𐓠"), - (0x104B9, "M", "𐓡"), - ] - - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x104BA, "M", "𐓢"), - (0x104BB, "M", "𐓣"), - (0x104BC, "M", "𐓤"), - (0x104BD, "M", "𐓥"), - (0x104BE, "M", "𐓦"), - (0x104BF, "M", "𐓧"), - (0x104C0, "M", "𐓨"), - (0x104C1, "M", "𐓩"), - (0x104C2, "M", "𐓪"), - (0x104C3, "M", "𐓫"), - (0x104C4, "M", "𐓬"), - (0x104C5, "M", "𐓭"), - (0x104C6, "M", "𐓮"), - (0x104C7, "M", "𐓯"), - (0x104C8, "M", "𐓰"), - (0x104C9, "M", "𐓱"), - (0x104CA, "M", "𐓲"), - (0x104CB, "M", "𐓳"), - (0x104CC, "M", "𐓴"), - (0x104CD, "M", "𐓵"), - (0x104CE, "M", "𐓶"), - (0x104CF, "M", "𐓷"), - (0x104D0, "M", "𐓸"), - (0x104D1, "M", "𐓹"), - (0x104D2, "M", "𐓺"), - (0x104D3, "M", "𐓻"), - (0x104D4, "X"), - (0x104D8, "V"), - (0x104FC, "X"), - (0x10500, "V"), - (0x10528, "X"), - (0x10530, "V"), - (0x10564, "X"), - (0x1056F, "V"), - (0x10570, "M", "𐖗"), - (0x10571, "M", "𐖘"), - (0x10572, "M", "𐖙"), - (0x10573, "M", "𐖚"), - (0x10574, "M", "𐖛"), - (0x10575, "M", "𐖜"), - (0x10576, "M", "𐖝"), - (0x10577, "M", "𐖞"), - (0x10578, "M", "𐖟"), - (0x10579, "M", "𐖠"), - (0x1057A, "M", "𐖡"), - (0x1057B, "X"), - (0x1057C, "M", "𐖣"), - (0x1057D, "M", "𐖤"), - (0x1057E, "M", "𐖥"), - (0x1057F, "M", "𐖦"), - (0x10580, "M", "𐖧"), - (0x10581, "M", "𐖨"), - (0x10582, "M", "𐖩"), - (0x10583, "M", "𐖪"), - (0x10584, "M", "𐖫"), - (0x10585, "M", "𐖬"), - (0x10586, "M", "𐖭"), - (0x10587, "M", "𐖮"), - (0x10588, "M", "𐖯"), - (0x10589, "M", "𐖰"), - (0x1058A, "M", "𐖱"), - (0x1058B, "X"), - (0x1058C, "M", "𐖳"), - (0x1058D, "M", "𐖴"), - (0x1058E, "M", "𐖵"), - (0x1058F, "M", "𐖶"), - (0x10590, "M", "𐖷"), - (0x10591, "M", "𐖸"), - (0x10592, "M", "𐖹"), - (0x10593, "X"), - (0x10594, "M", "𐖻"), - (0x10595, "M", "𐖼"), - (0x10596, "X"), - (0x10597, "V"), - (0x105A2, "X"), - (0x105A3, "V"), - (0x105B2, "X"), - (0x105B3, "V"), - (0x105BA, "X"), - (0x105BB, "V"), - (0x105BD, "X"), - (0x10600, "V"), - (0x10737, "X"), - (0x10740, "V"), - (0x10756, "X"), - (0x10760, "V"), - (0x10768, "X"), - (0x10780, "V"), - (0x10781, "M", "ː"), - (0x10782, "M", "ˑ"), - (0x10783, "M", "æ"), - (0x10784, "M", "ʙ"), - (0x10785, "M", "ɓ"), - (0x10786, "X"), - (0x10787, "M", "ʣ"), - (0x10788, "M", "ꭦ"), - (0x10789, "M", "ʥ"), - (0x1078A, "M", "ʤ"), - (0x1078B, "M", "ɖ"), - (0x1078C, "M", "ɗ"), - ] - - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1078D, "M", "ᶑ"), - (0x1078E, "M", "ɘ"), - (0x1078F, "M", "ɞ"), - (0x10790, "M", "ʩ"), - (0x10791, "M", "ɤ"), - (0x10792, "M", "ɢ"), - (0x10793, "M", "ɠ"), - (0x10794, "M", "ʛ"), - (0x10795, "M", "ħ"), - (0x10796, "M", "ʜ"), - (0x10797, "M", "ɧ"), - (0x10798, "M", "ʄ"), - (0x10799, "M", "ʪ"), - (0x1079A, "M", "ʫ"), - (0x1079B, "M", "ɬ"), - (0x1079C, "M", "𝼄"), - (0x1079D, "M", "ꞎ"), - 
(0x1079E, "M", "ɮ"), - (0x1079F, "M", "𝼅"), - (0x107A0, "M", "ʎ"), - (0x107A1, "M", "𝼆"), - (0x107A2, "M", "ø"), - (0x107A3, "M", "ɶ"), - (0x107A4, "M", "ɷ"), - (0x107A5, "M", "q"), - (0x107A6, "M", "ɺ"), - (0x107A7, "M", "𝼈"), - (0x107A8, "M", "ɽ"), - (0x107A9, "M", "ɾ"), - (0x107AA, "M", "ʀ"), - (0x107AB, "M", "ʨ"), - (0x107AC, "M", "ʦ"), - (0x107AD, "M", "ꭧ"), - (0x107AE, "M", "ʧ"), - (0x107AF, "M", "ʈ"), - (0x107B0, "M", "ⱱ"), - (0x107B1, "X"), - (0x107B2, "M", "ʏ"), - (0x107B3, "M", "ʡ"), - (0x107B4, "M", "ʢ"), - (0x107B5, "M", "ʘ"), - (0x107B6, "M", "ǀ"), - (0x107B7, "M", "ǁ"), - (0x107B8, "M", "ǂ"), - (0x107B9, "M", "𝼊"), - (0x107BA, "M", "𝼞"), - (0x107BB, "X"), - (0x10800, "V"), - (0x10806, "X"), - (0x10808, "V"), - (0x10809, "X"), - (0x1080A, "V"), - (0x10836, "X"), - (0x10837, "V"), - (0x10839, "X"), - (0x1083C, "V"), - (0x1083D, "X"), - (0x1083F, "V"), - (0x10856, "X"), - (0x10857, "V"), - (0x1089F, "X"), - (0x108A7, "V"), - (0x108B0, "X"), - (0x108E0, "V"), - (0x108F3, "X"), - (0x108F4, "V"), - (0x108F6, "X"), - (0x108FB, "V"), - (0x1091C, "X"), - (0x1091F, "V"), - (0x1093A, "X"), - (0x1093F, "V"), - (0x10940, "X"), - (0x10980, "V"), - (0x109B8, "X"), - (0x109BC, "V"), - (0x109D0, "X"), - (0x109D2, "V"), - (0x10A04, "X"), - (0x10A05, "V"), - (0x10A07, "X"), - (0x10A0C, "V"), - (0x10A14, "X"), - (0x10A15, "V"), - (0x10A18, "X"), - (0x10A19, "V"), - (0x10A36, "X"), - (0x10A38, "V"), - (0x10A3B, "X"), - (0x10A3F, "V"), - (0x10A49, "X"), - (0x10A50, "V"), - (0x10A59, "X"), - (0x10A60, "V"), - (0x10AA0, "X"), - (0x10AC0, "V"), - (0x10AE7, "X"), - (0x10AEB, "V"), - (0x10AF7, "X"), - (0x10B00, "V"), - ] - - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10B36, "X"), - (0x10B39, "V"), - (0x10B56, "X"), - (0x10B58, "V"), - (0x10B73, "X"), - (0x10B78, "V"), - (0x10B92, "X"), - (0x10B99, "V"), - (0x10B9D, "X"), - (0x10BA9, "V"), - (0x10BB0, "X"), - (0x10C00, "V"), - (0x10C49, "X"), - (0x10C80, "M", "𐳀"), - (0x10C81, "M", "𐳁"), - (0x10C82, "M", "𐳂"), - (0x10C83, "M", "𐳃"), - (0x10C84, "M", "𐳄"), - (0x10C85, "M", "𐳅"), - (0x10C86, "M", "𐳆"), - (0x10C87, "M", "𐳇"), - (0x10C88, "M", "𐳈"), - (0x10C89, "M", "𐳉"), - (0x10C8A, "M", "𐳊"), - (0x10C8B, "M", "𐳋"), - (0x10C8C, "M", "𐳌"), - (0x10C8D, "M", "𐳍"), - (0x10C8E, "M", "𐳎"), - (0x10C8F, "M", "𐳏"), - (0x10C90, "M", "𐳐"), - (0x10C91, "M", "𐳑"), - (0x10C92, "M", "𐳒"), - (0x10C93, "M", "𐳓"), - (0x10C94, "M", "𐳔"), - (0x10C95, "M", "𐳕"), - (0x10C96, "M", "𐳖"), - (0x10C97, "M", "𐳗"), - (0x10C98, "M", "𐳘"), - (0x10C99, "M", "𐳙"), - (0x10C9A, "M", "𐳚"), - (0x10C9B, "M", "𐳛"), - (0x10C9C, "M", "𐳜"), - (0x10C9D, "M", "𐳝"), - (0x10C9E, "M", "𐳞"), - (0x10C9F, "M", "𐳟"), - (0x10CA0, "M", "𐳠"), - (0x10CA1, "M", "𐳡"), - (0x10CA2, "M", "𐳢"), - (0x10CA3, "M", "𐳣"), - (0x10CA4, "M", "𐳤"), - (0x10CA5, "M", "𐳥"), - (0x10CA6, "M", "𐳦"), - (0x10CA7, "M", "𐳧"), - (0x10CA8, "M", "𐳨"), - (0x10CA9, "M", "𐳩"), - (0x10CAA, "M", "𐳪"), - (0x10CAB, "M", "𐳫"), - (0x10CAC, "M", "𐳬"), - (0x10CAD, "M", "𐳭"), - (0x10CAE, "M", "𐳮"), - (0x10CAF, "M", "𐳯"), - (0x10CB0, "M", "𐳰"), - (0x10CB1, "M", "𐳱"), - (0x10CB2, "M", "𐳲"), - (0x10CB3, "X"), - (0x10CC0, "V"), - (0x10CF3, "X"), - (0x10CFA, "V"), - (0x10D28, "X"), - (0x10D30, "V"), - (0x10D3A, "X"), - (0x10E60, "V"), - (0x10E7F, "X"), - (0x10E80, "V"), - (0x10EAA, "X"), - (0x10EAB, "V"), - (0x10EAE, "X"), - (0x10EB0, "V"), - (0x10EB2, "X"), - (0x10EFD, "V"), - (0x10F28, "X"), - (0x10F30, "V"), - (0x10F5A, "X"), - (0x10F70, "V"), - (0x10F8A, "X"), - (0x10FB0, "V"), - (0x10FCC, "X"), - (0x10FE0, 
"V"), - (0x10FF7, "X"), - (0x11000, "V"), - (0x1104E, "X"), - (0x11052, "V"), - (0x11076, "X"), - (0x1107F, "V"), - (0x110BD, "X"), - (0x110BE, "V"), - (0x110C3, "X"), - (0x110D0, "V"), - (0x110E9, "X"), - (0x110F0, "V"), - ] - - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x110FA, "X"), - (0x11100, "V"), - (0x11135, "X"), - (0x11136, "V"), - (0x11148, "X"), - (0x11150, "V"), - (0x11177, "X"), - (0x11180, "V"), - (0x111E0, "X"), - (0x111E1, "V"), - (0x111F5, "X"), - (0x11200, "V"), - (0x11212, "X"), - (0x11213, "V"), - (0x11242, "X"), - (0x11280, "V"), - (0x11287, "X"), - (0x11288, "V"), - (0x11289, "X"), - (0x1128A, "V"), - (0x1128E, "X"), - (0x1128F, "V"), - (0x1129E, "X"), - (0x1129F, "V"), - (0x112AA, "X"), - (0x112B0, "V"), - (0x112EB, "X"), - (0x112F0, "V"), - (0x112FA, "X"), - (0x11300, "V"), - (0x11304, "X"), - (0x11305, "V"), - (0x1130D, "X"), - (0x1130F, "V"), - (0x11311, "X"), - (0x11313, "V"), - (0x11329, "X"), - (0x1132A, "V"), - (0x11331, "X"), - (0x11332, "V"), - (0x11334, "X"), - (0x11335, "V"), - (0x1133A, "X"), - (0x1133B, "V"), - (0x11345, "X"), - (0x11347, "V"), - (0x11349, "X"), - (0x1134B, "V"), - (0x1134E, "X"), - (0x11350, "V"), - (0x11351, "X"), - (0x11357, "V"), - (0x11358, "X"), - (0x1135D, "V"), - (0x11364, "X"), - (0x11366, "V"), - (0x1136D, "X"), - (0x11370, "V"), - (0x11375, "X"), - (0x11400, "V"), - (0x1145C, "X"), - (0x1145D, "V"), - (0x11462, "X"), - (0x11480, "V"), - (0x114C8, "X"), - (0x114D0, "V"), - (0x114DA, "X"), - (0x11580, "V"), - (0x115B6, "X"), - (0x115B8, "V"), - (0x115DE, "X"), - (0x11600, "V"), - (0x11645, "X"), - (0x11650, "V"), - (0x1165A, "X"), - (0x11660, "V"), - (0x1166D, "X"), - (0x11680, "V"), - (0x116BA, "X"), - (0x116C0, "V"), - (0x116CA, "X"), - (0x11700, "V"), - (0x1171B, "X"), - (0x1171D, "V"), - (0x1172C, "X"), - (0x11730, "V"), - (0x11747, "X"), - (0x11800, "V"), - (0x1183C, "X"), - (0x118A0, "M", "𑣀"), - (0x118A1, "M", "𑣁"), - (0x118A2, "M", "𑣂"), - (0x118A3, "M", "𑣃"), - (0x118A4, "M", "𑣄"), - (0x118A5, "M", "𑣅"), - (0x118A6, "M", "𑣆"), - (0x118A7, "M", "𑣇"), - (0x118A8, "M", "𑣈"), - (0x118A9, "M", "𑣉"), - (0x118AA, "M", "𑣊"), - ] - - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x118AB, "M", "𑣋"), - (0x118AC, "M", "𑣌"), - (0x118AD, "M", "𑣍"), - (0x118AE, "M", "𑣎"), - (0x118AF, "M", "𑣏"), - (0x118B0, "M", "𑣐"), - (0x118B1, "M", "𑣑"), - (0x118B2, "M", "𑣒"), - (0x118B3, "M", "𑣓"), - (0x118B4, "M", "𑣔"), - (0x118B5, "M", "𑣕"), - (0x118B6, "M", "𑣖"), - (0x118B7, "M", "𑣗"), - (0x118B8, "M", "𑣘"), - (0x118B9, "M", "𑣙"), - (0x118BA, "M", "𑣚"), - (0x118BB, "M", "𑣛"), - (0x118BC, "M", "𑣜"), - (0x118BD, "M", "𑣝"), - (0x118BE, "M", "𑣞"), - (0x118BF, "M", "𑣟"), - (0x118C0, "V"), - (0x118F3, "X"), - (0x118FF, "V"), - (0x11907, "X"), - (0x11909, "V"), - (0x1190A, "X"), - (0x1190C, "V"), - (0x11914, "X"), - (0x11915, "V"), - (0x11917, "X"), - (0x11918, "V"), - (0x11936, "X"), - (0x11937, "V"), - (0x11939, "X"), - (0x1193B, "V"), - (0x11947, "X"), - (0x11950, "V"), - (0x1195A, "X"), - (0x119A0, "V"), - (0x119A8, "X"), - (0x119AA, "V"), - (0x119D8, "X"), - (0x119DA, "V"), - (0x119E5, "X"), - (0x11A00, "V"), - (0x11A48, "X"), - (0x11A50, "V"), - (0x11AA3, "X"), - (0x11AB0, "V"), - (0x11AF9, "X"), - (0x11B00, "V"), - (0x11B0A, "X"), - (0x11C00, "V"), - (0x11C09, "X"), - (0x11C0A, "V"), - (0x11C37, "X"), - (0x11C38, "V"), - (0x11C46, "X"), - (0x11C50, "V"), - (0x11C6D, "X"), - (0x11C70, "V"), - (0x11C90, "X"), - (0x11C92, "V"), - (0x11CA8, "X"), - (0x11CA9, "V"), - 
(0x11CB7, "X"), - (0x11D00, "V"), - (0x11D07, "X"), - (0x11D08, "V"), - (0x11D0A, "X"), - (0x11D0B, "V"), - (0x11D37, "X"), - (0x11D3A, "V"), - (0x11D3B, "X"), - (0x11D3C, "V"), - (0x11D3E, "X"), - (0x11D3F, "V"), - (0x11D48, "X"), - (0x11D50, "V"), - (0x11D5A, "X"), - (0x11D60, "V"), - (0x11D66, "X"), - (0x11D67, "V"), - (0x11D69, "X"), - (0x11D6A, "V"), - (0x11D8F, "X"), - (0x11D90, "V"), - (0x11D92, "X"), - (0x11D93, "V"), - (0x11D99, "X"), - (0x11DA0, "V"), - (0x11DAA, "X"), - (0x11EE0, "V"), - (0x11EF9, "X"), - (0x11F00, "V"), - (0x11F11, "X"), - (0x11F12, "V"), - (0x11F3B, "X"), - (0x11F3E, "V"), - ] - - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11F5A, "X"), - (0x11FB0, "V"), - (0x11FB1, "X"), - (0x11FC0, "V"), - (0x11FF2, "X"), - (0x11FFF, "V"), - (0x1239A, "X"), - (0x12400, "V"), - (0x1246F, "X"), - (0x12470, "V"), - (0x12475, "X"), - (0x12480, "V"), - (0x12544, "X"), - (0x12F90, "V"), - (0x12FF3, "X"), - (0x13000, "V"), - (0x13430, "X"), - (0x13440, "V"), - (0x13456, "X"), - (0x14400, "V"), - (0x14647, "X"), - (0x16800, "V"), - (0x16A39, "X"), - (0x16A40, "V"), - (0x16A5F, "X"), - (0x16A60, "V"), - (0x16A6A, "X"), - (0x16A6E, "V"), - (0x16ABF, "X"), - (0x16AC0, "V"), - (0x16ACA, "X"), - (0x16AD0, "V"), - (0x16AEE, "X"), - (0x16AF0, "V"), - (0x16AF6, "X"), - (0x16B00, "V"), - (0x16B46, "X"), - (0x16B50, "V"), - (0x16B5A, "X"), - (0x16B5B, "V"), - (0x16B62, "X"), - (0x16B63, "V"), - (0x16B78, "X"), - (0x16B7D, "V"), - (0x16B90, "X"), - (0x16E40, "M", "𖹠"), - (0x16E41, "M", "𖹡"), - (0x16E42, "M", "𖹢"), - (0x16E43, "M", "𖹣"), - (0x16E44, "M", "𖹤"), - (0x16E45, "M", "𖹥"), - (0x16E46, "M", "𖹦"), - (0x16E47, "M", "𖹧"), - (0x16E48, "M", "𖹨"), - (0x16E49, "M", "𖹩"), - (0x16E4A, "M", "𖹪"), - (0x16E4B, "M", "𖹫"), - (0x16E4C, "M", "𖹬"), - (0x16E4D, "M", "𖹭"), - (0x16E4E, "M", "𖹮"), - (0x16E4F, "M", "𖹯"), - (0x16E50, "M", "𖹰"), - (0x16E51, "M", "𖹱"), - (0x16E52, "M", "𖹲"), - (0x16E53, "M", "𖹳"), - (0x16E54, "M", "𖹴"), - (0x16E55, "M", "𖹵"), - (0x16E56, "M", "𖹶"), - (0x16E57, "M", "𖹷"), - (0x16E58, "M", "𖹸"), - (0x16E59, "M", "𖹹"), - (0x16E5A, "M", "𖹺"), - (0x16E5B, "M", "𖹻"), - (0x16E5C, "M", "𖹼"), - (0x16E5D, "M", "𖹽"), - (0x16E5E, "M", "𖹾"), - (0x16E5F, "M", "𖹿"), - (0x16E60, "V"), - (0x16E9B, "X"), - (0x16F00, "V"), - (0x16F4B, "X"), - (0x16F4F, "V"), - (0x16F88, "X"), - (0x16F8F, "V"), - (0x16FA0, "X"), - (0x16FE0, "V"), - (0x16FE5, "X"), - (0x16FF0, "V"), - (0x16FF2, "X"), - (0x17000, "V"), - (0x187F8, "X"), - (0x18800, "V"), - (0x18CD6, "X"), - (0x18D00, "V"), - (0x18D09, "X"), - (0x1AFF0, "V"), - (0x1AFF4, "X"), - (0x1AFF5, "V"), - (0x1AFFC, "X"), - (0x1AFFD, "V"), - ] - - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1AFFF, "X"), - (0x1B000, "V"), - (0x1B123, "X"), - (0x1B132, "V"), - (0x1B133, "X"), - (0x1B150, "V"), - (0x1B153, "X"), - (0x1B155, "V"), - (0x1B156, "X"), - (0x1B164, "V"), - (0x1B168, "X"), - (0x1B170, "V"), - (0x1B2FC, "X"), - (0x1BC00, "V"), - (0x1BC6B, "X"), - (0x1BC70, "V"), - (0x1BC7D, "X"), - (0x1BC80, "V"), - (0x1BC89, "X"), - (0x1BC90, "V"), - (0x1BC9A, "X"), - (0x1BC9C, "V"), - (0x1BCA0, "I"), - (0x1BCA4, "X"), - (0x1CF00, "V"), - (0x1CF2E, "X"), - (0x1CF30, "V"), - (0x1CF47, "X"), - (0x1CF50, "V"), - (0x1CFC4, "X"), - (0x1D000, "V"), - (0x1D0F6, "X"), - (0x1D100, "V"), - (0x1D127, "X"), - (0x1D129, "V"), - (0x1D15E, "M", "𝅗𝅥"), - (0x1D15F, "M", "𝅘𝅥"), - (0x1D160, "M", "𝅘𝅥𝅮"), - (0x1D161, "M", "𝅘𝅥𝅯"), - (0x1D162, "M", "𝅘𝅥𝅰"), - (0x1D163, "M", "𝅘𝅥𝅱"), - (0x1D164, "M", "𝅘𝅥𝅲"), - 
(0x1D165, "V"), - (0x1D173, "X"), - (0x1D17B, "V"), - (0x1D1BB, "M", "𝆹𝅥"), - (0x1D1BC, "M", "𝆺𝅥"), - (0x1D1BD, "M", "𝆹𝅥𝅮"), - (0x1D1BE, "M", "𝆺𝅥𝅮"), - (0x1D1BF, "M", "𝆹𝅥𝅯"), - (0x1D1C0, "M", "𝆺𝅥𝅯"), - (0x1D1C1, "V"), - (0x1D1EB, "X"), - (0x1D200, "V"), - (0x1D246, "X"), - (0x1D2C0, "V"), - (0x1D2D4, "X"), - (0x1D2E0, "V"), - (0x1D2F4, "X"), - (0x1D300, "V"), - (0x1D357, "X"), - (0x1D360, "V"), - (0x1D379, "X"), - (0x1D400, "M", "a"), - (0x1D401, "M", "b"), - (0x1D402, "M", "c"), - (0x1D403, "M", "d"), - (0x1D404, "M", "e"), - (0x1D405, "M", "f"), - (0x1D406, "M", "g"), - (0x1D407, "M", "h"), - (0x1D408, "M", "i"), - (0x1D409, "M", "j"), - (0x1D40A, "M", "k"), - (0x1D40B, "M", "l"), - (0x1D40C, "M", "m"), - (0x1D40D, "M", "n"), - (0x1D40E, "M", "o"), - (0x1D40F, "M", "p"), - (0x1D410, "M", "q"), - (0x1D411, "M", "r"), - (0x1D412, "M", "s"), - (0x1D413, "M", "t"), - (0x1D414, "M", "u"), - (0x1D415, "M", "v"), - (0x1D416, "M", "w"), - (0x1D417, "M", "x"), - (0x1D418, "M", "y"), - (0x1D419, "M", "z"), - (0x1D41A, "M", "a"), - (0x1D41B, "M", "b"), - (0x1D41C, "M", "c"), - (0x1D41D, "M", "d"), - (0x1D41E, "M", "e"), - (0x1D41F, "M", "f"), - (0x1D420, "M", "g"), - (0x1D421, "M", "h"), - (0x1D422, "M", "i"), - (0x1D423, "M", "j"), - (0x1D424, "M", "k"), - ] - - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D425, "M", "l"), - (0x1D426, "M", "m"), - (0x1D427, "M", "n"), - (0x1D428, "M", "o"), - (0x1D429, "M", "p"), - (0x1D42A, "M", "q"), - (0x1D42B, "M", "r"), - (0x1D42C, "M", "s"), - (0x1D42D, "M", "t"), - (0x1D42E, "M", "u"), - (0x1D42F, "M", "v"), - (0x1D430, "M", "w"), - (0x1D431, "M", "x"), - (0x1D432, "M", "y"), - (0x1D433, "M", "z"), - (0x1D434, "M", "a"), - (0x1D435, "M", "b"), - (0x1D436, "M", "c"), - (0x1D437, "M", "d"), - (0x1D438, "M", "e"), - (0x1D439, "M", "f"), - (0x1D43A, "M", "g"), - (0x1D43B, "M", "h"), - (0x1D43C, "M", "i"), - (0x1D43D, "M", "j"), - (0x1D43E, "M", "k"), - (0x1D43F, "M", "l"), - (0x1D440, "M", "m"), - (0x1D441, "M", "n"), - (0x1D442, "M", "o"), - (0x1D443, "M", "p"), - (0x1D444, "M", "q"), - (0x1D445, "M", "r"), - (0x1D446, "M", "s"), - (0x1D447, "M", "t"), - (0x1D448, "M", "u"), - (0x1D449, "M", "v"), - (0x1D44A, "M", "w"), - (0x1D44B, "M", "x"), - (0x1D44C, "M", "y"), - (0x1D44D, "M", "z"), - (0x1D44E, "M", "a"), - (0x1D44F, "M", "b"), - (0x1D450, "M", "c"), - (0x1D451, "M", "d"), - (0x1D452, "M", "e"), - (0x1D453, "M", "f"), - (0x1D454, "M", "g"), - (0x1D455, "X"), - (0x1D456, "M", "i"), - (0x1D457, "M", "j"), - (0x1D458, "M", "k"), - (0x1D459, "M", "l"), - (0x1D45A, "M", "m"), - (0x1D45B, "M", "n"), - (0x1D45C, "M", "o"), - (0x1D45D, "M", "p"), - (0x1D45E, "M", "q"), - (0x1D45F, "M", "r"), - (0x1D460, "M", "s"), - (0x1D461, "M", "t"), - (0x1D462, "M", "u"), - (0x1D463, "M", "v"), - (0x1D464, "M", "w"), - (0x1D465, "M", "x"), - (0x1D466, "M", "y"), - (0x1D467, "M", "z"), - (0x1D468, "M", "a"), - (0x1D469, "M", "b"), - (0x1D46A, "M", "c"), - (0x1D46B, "M", "d"), - (0x1D46C, "M", "e"), - (0x1D46D, "M", "f"), - (0x1D46E, "M", "g"), - (0x1D46F, "M", "h"), - (0x1D470, "M", "i"), - (0x1D471, "M", "j"), - (0x1D472, "M", "k"), - (0x1D473, "M", "l"), - (0x1D474, "M", "m"), - (0x1D475, "M", "n"), - (0x1D476, "M", "o"), - (0x1D477, "M", "p"), - (0x1D478, "M", "q"), - (0x1D479, "M", "r"), - (0x1D47A, "M", "s"), - (0x1D47B, "M", "t"), - (0x1D47C, "M", "u"), - (0x1D47D, "M", "v"), - (0x1D47E, "M", "w"), - (0x1D47F, "M", "x"), - (0x1D480, "M", "y"), - (0x1D481, "M", "z"), - (0x1D482, "M", "a"), - (0x1D483, "M", "b"), - (0x1D484, "M", 
"c"), - (0x1D485, "M", "d"), - (0x1D486, "M", "e"), - (0x1D487, "M", "f"), - (0x1D488, "M", "g"), - ] - - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D489, "M", "h"), - (0x1D48A, "M", "i"), - (0x1D48B, "M", "j"), - (0x1D48C, "M", "k"), - (0x1D48D, "M", "l"), - (0x1D48E, "M", "m"), - (0x1D48F, "M", "n"), - (0x1D490, "M", "o"), - (0x1D491, "M", "p"), - (0x1D492, "M", "q"), - (0x1D493, "M", "r"), - (0x1D494, "M", "s"), - (0x1D495, "M", "t"), - (0x1D496, "M", "u"), - (0x1D497, "M", "v"), - (0x1D498, "M", "w"), - (0x1D499, "M", "x"), - (0x1D49A, "M", "y"), - (0x1D49B, "M", "z"), - (0x1D49C, "M", "a"), - (0x1D49D, "X"), - (0x1D49E, "M", "c"), - (0x1D49F, "M", "d"), - (0x1D4A0, "X"), - (0x1D4A2, "M", "g"), - (0x1D4A3, "X"), - (0x1D4A5, "M", "j"), - (0x1D4A6, "M", "k"), - (0x1D4A7, "X"), - (0x1D4A9, "M", "n"), - (0x1D4AA, "M", "o"), - (0x1D4AB, "M", "p"), - (0x1D4AC, "M", "q"), - (0x1D4AD, "X"), - (0x1D4AE, "M", "s"), - (0x1D4AF, "M", "t"), - (0x1D4B0, "M", "u"), - (0x1D4B1, "M", "v"), - (0x1D4B2, "M", "w"), - (0x1D4B3, "M", "x"), - (0x1D4B4, "M", "y"), - (0x1D4B5, "M", "z"), - (0x1D4B6, "M", "a"), - (0x1D4B7, "M", "b"), - (0x1D4B8, "M", "c"), - (0x1D4B9, "M", "d"), - (0x1D4BA, "X"), - (0x1D4BB, "M", "f"), - (0x1D4BC, "X"), - (0x1D4BD, "M", "h"), - (0x1D4BE, "M", "i"), - (0x1D4BF, "M", "j"), - (0x1D4C0, "M", "k"), - (0x1D4C1, "M", "l"), - (0x1D4C2, "M", "m"), - (0x1D4C3, "M", "n"), - (0x1D4C4, "X"), - (0x1D4C5, "M", "p"), - (0x1D4C6, "M", "q"), - (0x1D4C7, "M", "r"), - (0x1D4C8, "M", "s"), - (0x1D4C9, "M", "t"), - (0x1D4CA, "M", "u"), - (0x1D4CB, "M", "v"), - (0x1D4CC, "M", "w"), - (0x1D4CD, "M", "x"), - (0x1D4CE, "M", "y"), - (0x1D4CF, "M", "z"), - (0x1D4D0, "M", "a"), - (0x1D4D1, "M", "b"), - (0x1D4D2, "M", "c"), - (0x1D4D3, "M", "d"), - (0x1D4D4, "M", "e"), - (0x1D4D5, "M", "f"), - (0x1D4D6, "M", "g"), - (0x1D4D7, "M", "h"), - (0x1D4D8, "M", "i"), - (0x1D4D9, "M", "j"), - (0x1D4DA, "M", "k"), - (0x1D4DB, "M", "l"), - (0x1D4DC, "M", "m"), - (0x1D4DD, "M", "n"), - (0x1D4DE, "M", "o"), - (0x1D4DF, "M", "p"), - (0x1D4E0, "M", "q"), - (0x1D4E1, "M", "r"), - (0x1D4E2, "M", "s"), - (0x1D4E3, "M", "t"), - (0x1D4E4, "M", "u"), - (0x1D4E5, "M", "v"), - (0x1D4E6, "M", "w"), - (0x1D4E7, "M", "x"), - (0x1D4E8, "M", "y"), - (0x1D4E9, "M", "z"), - (0x1D4EA, "M", "a"), - (0x1D4EB, "M", "b"), - (0x1D4EC, "M", "c"), - (0x1D4ED, "M", "d"), - (0x1D4EE, "M", "e"), - (0x1D4EF, "M", "f"), - ] - - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D4F0, "M", "g"), - (0x1D4F1, "M", "h"), - (0x1D4F2, "M", "i"), - (0x1D4F3, "M", "j"), - (0x1D4F4, "M", "k"), - (0x1D4F5, "M", "l"), - (0x1D4F6, "M", "m"), - (0x1D4F7, "M", "n"), - (0x1D4F8, "M", "o"), - (0x1D4F9, "M", "p"), - (0x1D4FA, "M", "q"), - (0x1D4FB, "M", "r"), - (0x1D4FC, "M", "s"), - (0x1D4FD, "M", "t"), - (0x1D4FE, "M", "u"), - (0x1D4FF, "M", "v"), - (0x1D500, "M", "w"), - (0x1D501, "M", "x"), - (0x1D502, "M", "y"), - (0x1D503, "M", "z"), - (0x1D504, "M", "a"), - (0x1D505, "M", "b"), - (0x1D506, "X"), - (0x1D507, "M", "d"), - (0x1D508, "M", "e"), - (0x1D509, "M", "f"), - (0x1D50A, "M", "g"), - (0x1D50B, "X"), - (0x1D50D, "M", "j"), - (0x1D50E, "M", "k"), - (0x1D50F, "M", "l"), - (0x1D510, "M", "m"), - (0x1D511, "M", "n"), - (0x1D512, "M", "o"), - (0x1D513, "M", "p"), - (0x1D514, "M", "q"), - (0x1D515, "X"), - (0x1D516, "M", "s"), - (0x1D517, "M", "t"), - (0x1D518, "M", "u"), - (0x1D519, "M", "v"), - (0x1D51A, "M", "w"), - (0x1D51B, "M", "x"), - (0x1D51C, "M", "y"), - (0x1D51D, "X"), - 
(0x1D51E, "M", "a"), - (0x1D51F, "M", "b"), - (0x1D520, "M", "c"), - (0x1D521, "M", "d"), - (0x1D522, "M", "e"), - (0x1D523, "M", "f"), - (0x1D524, "M", "g"), - (0x1D525, "M", "h"), - (0x1D526, "M", "i"), - (0x1D527, "M", "j"), - (0x1D528, "M", "k"), - (0x1D529, "M", "l"), - (0x1D52A, "M", "m"), - (0x1D52B, "M", "n"), - (0x1D52C, "M", "o"), - (0x1D52D, "M", "p"), - (0x1D52E, "M", "q"), - (0x1D52F, "M", "r"), - (0x1D530, "M", "s"), - (0x1D531, "M", "t"), - (0x1D532, "M", "u"), - (0x1D533, "M", "v"), - (0x1D534, "M", "w"), - (0x1D535, "M", "x"), - (0x1D536, "M", "y"), - (0x1D537, "M", "z"), - (0x1D538, "M", "a"), - (0x1D539, "M", "b"), - (0x1D53A, "X"), - (0x1D53B, "M", "d"), - (0x1D53C, "M", "e"), - (0x1D53D, "M", "f"), - (0x1D53E, "M", "g"), - (0x1D53F, "X"), - (0x1D540, "M", "i"), - (0x1D541, "M", "j"), - (0x1D542, "M", "k"), - (0x1D543, "M", "l"), - (0x1D544, "M", "m"), - (0x1D545, "X"), - (0x1D546, "M", "o"), - (0x1D547, "X"), - (0x1D54A, "M", "s"), - (0x1D54B, "M", "t"), - (0x1D54C, "M", "u"), - (0x1D54D, "M", "v"), - (0x1D54E, "M", "w"), - (0x1D54F, "M", "x"), - (0x1D550, "M", "y"), - (0x1D551, "X"), - (0x1D552, "M", "a"), - (0x1D553, "M", "b"), - (0x1D554, "M", "c"), - (0x1D555, "M", "d"), - (0x1D556, "M", "e"), - ] - - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D557, "M", "f"), - (0x1D558, "M", "g"), - (0x1D559, "M", "h"), - (0x1D55A, "M", "i"), - (0x1D55B, "M", "j"), - (0x1D55C, "M", "k"), - (0x1D55D, "M", "l"), - (0x1D55E, "M", "m"), - (0x1D55F, "M", "n"), - (0x1D560, "M", "o"), - (0x1D561, "M", "p"), - (0x1D562, "M", "q"), - (0x1D563, "M", "r"), - (0x1D564, "M", "s"), - (0x1D565, "M", "t"), - (0x1D566, "M", "u"), - (0x1D567, "M", "v"), - (0x1D568, "M", "w"), - (0x1D569, "M", "x"), - (0x1D56A, "M", "y"), - (0x1D56B, "M", "z"), - (0x1D56C, "M", "a"), - (0x1D56D, "M", "b"), - (0x1D56E, "M", "c"), - (0x1D56F, "M", "d"), - (0x1D570, "M", "e"), - (0x1D571, "M", "f"), - (0x1D572, "M", "g"), - (0x1D573, "M", "h"), - (0x1D574, "M", "i"), - (0x1D575, "M", "j"), - (0x1D576, "M", "k"), - (0x1D577, "M", "l"), - (0x1D578, "M", "m"), - (0x1D579, "M", "n"), - (0x1D57A, "M", "o"), - (0x1D57B, "M", "p"), - (0x1D57C, "M", "q"), - (0x1D57D, "M", "r"), - (0x1D57E, "M", "s"), - (0x1D57F, "M", "t"), - (0x1D580, "M", "u"), - (0x1D581, "M", "v"), - (0x1D582, "M", "w"), - (0x1D583, "M", "x"), - (0x1D584, "M", "y"), - (0x1D585, "M", "z"), - (0x1D586, "M", "a"), - (0x1D587, "M", "b"), - (0x1D588, "M", "c"), - (0x1D589, "M", "d"), - (0x1D58A, "M", "e"), - (0x1D58B, "M", "f"), - (0x1D58C, "M", "g"), - (0x1D58D, "M", "h"), - (0x1D58E, "M", "i"), - (0x1D58F, "M", "j"), - (0x1D590, "M", "k"), - (0x1D591, "M", "l"), - (0x1D592, "M", "m"), - (0x1D593, "M", "n"), - (0x1D594, "M", "o"), - (0x1D595, "M", "p"), - (0x1D596, "M", "q"), - (0x1D597, "M", "r"), - (0x1D598, "M", "s"), - (0x1D599, "M", "t"), - (0x1D59A, "M", "u"), - (0x1D59B, "M", "v"), - (0x1D59C, "M", "w"), - (0x1D59D, "M", "x"), - (0x1D59E, "M", "y"), - (0x1D59F, "M", "z"), - (0x1D5A0, "M", "a"), - (0x1D5A1, "M", "b"), - (0x1D5A2, "M", "c"), - (0x1D5A3, "M", "d"), - (0x1D5A4, "M", "e"), - (0x1D5A5, "M", "f"), - (0x1D5A6, "M", "g"), - (0x1D5A7, "M", "h"), - (0x1D5A8, "M", "i"), - (0x1D5A9, "M", "j"), - (0x1D5AA, "M", "k"), - (0x1D5AB, "M", "l"), - (0x1D5AC, "M", "m"), - (0x1D5AD, "M", "n"), - (0x1D5AE, "M", "o"), - (0x1D5AF, "M", "p"), - (0x1D5B0, "M", "q"), - (0x1D5B1, "M", "r"), - (0x1D5B2, "M", "s"), - (0x1D5B3, "M", "t"), - (0x1D5B4, "M", "u"), - (0x1D5B5, "M", "v"), - (0x1D5B6, "M", "w"), - (0x1D5B7, "M", 
"x"), - (0x1D5B8, "M", "y"), - (0x1D5B9, "M", "z"), - (0x1D5BA, "M", "a"), - ] - - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D5BB, "M", "b"), - (0x1D5BC, "M", "c"), - (0x1D5BD, "M", "d"), - (0x1D5BE, "M", "e"), - (0x1D5BF, "M", "f"), - (0x1D5C0, "M", "g"), - (0x1D5C1, "M", "h"), - (0x1D5C2, "M", "i"), - (0x1D5C3, "M", "j"), - (0x1D5C4, "M", "k"), - (0x1D5C5, "M", "l"), - (0x1D5C6, "M", "m"), - (0x1D5C7, "M", "n"), - (0x1D5C8, "M", "o"), - (0x1D5C9, "M", "p"), - (0x1D5CA, "M", "q"), - (0x1D5CB, "M", "r"), - (0x1D5CC, "M", "s"), - (0x1D5CD, "M", "t"), - (0x1D5CE, "M", "u"), - (0x1D5CF, "M", "v"), - (0x1D5D0, "M", "w"), - (0x1D5D1, "M", "x"), - (0x1D5D2, "M", "y"), - (0x1D5D3, "M", "z"), - (0x1D5D4, "M", "a"), - (0x1D5D5, "M", "b"), - (0x1D5D6, "M", "c"), - (0x1D5D7, "M", "d"), - (0x1D5D8, "M", "e"), - (0x1D5D9, "M", "f"), - (0x1D5DA, "M", "g"), - (0x1D5DB, "M", "h"), - (0x1D5DC, "M", "i"), - (0x1D5DD, "M", "j"), - (0x1D5DE, "M", "k"), - (0x1D5DF, "M", "l"), - (0x1D5E0, "M", "m"), - (0x1D5E1, "M", "n"), - (0x1D5E2, "M", "o"), - (0x1D5E3, "M", "p"), - (0x1D5E4, "M", "q"), - (0x1D5E5, "M", "r"), - (0x1D5E6, "M", "s"), - (0x1D5E7, "M", "t"), - (0x1D5E8, "M", "u"), - (0x1D5E9, "M", "v"), - (0x1D5EA, "M", "w"), - (0x1D5EB, "M", "x"), - (0x1D5EC, "M", "y"), - (0x1D5ED, "M", "z"), - (0x1D5EE, "M", "a"), - (0x1D5EF, "M", "b"), - (0x1D5F0, "M", "c"), - (0x1D5F1, "M", "d"), - (0x1D5F2, "M", "e"), - (0x1D5F3, "M", "f"), - (0x1D5F4, "M", "g"), - (0x1D5F5, "M", "h"), - (0x1D5F6, "M", "i"), - (0x1D5F7, "M", "j"), - (0x1D5F8, "M", "k"), - (0x1D5F9, "M", "l"), - (0x1D5FA, "M", "m"), - (0x1D5FB, "M", "n"), - (0x1D5FC, "M", "o"), - (0x1D5FD, "M", "p"), - (0x1D5FE, "M", "q"), - (0x1D5FF, "M", "r"), - (0x1D600, "M", "s"), - (0x1D601, "M", "t"), - (0x1D602, "M", "u"), - (0x1D603, "M", "v"), - (0x1D604, "M", "w"), - (0x1D605, "M", "x"), - (0x1D606, "M", "y"), - (0x1D607, "M", "z"), - (0x1D608, "M", "a"), - (0x1D609, "M", "b"), - (0x1D60A, "M", "c"), - (0x1D60B, "M", "d"), - (0x1D60C, "M", "e"), - (0x1D60D, "M", "f"), - (0x1D60E, "M", "g"), - (0x1D60F, "M", "h"), - (0x1D610, "M", "i"), - (0x1D611, "M", "j"), - (0x1D612, "M", "k"), - (0x1D613, "M", "l"), - (0x1D614, "M", "m"), - (0x1D615, "M", "n"), - (0x1D616, "M", "o"), - (0x1D617, "M", "p"), - (0x1D618, "M", "q"), - (0x1D619, "M", "r"), - (0x1D61A, "M", "s"), - (0x1D61B, "M", "t"), - (0x1D61C, "M", "u"), - (0x1D61D, "M", "v"), - (0x1D61E, "M", "w"), - ] - - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D61F, "M", "x"), - (0x1D620, "M", "y"), - (0x1D621, "M", "z"), - (0x1D622, "M", "a"), - (0x1D623, "M", "b"), - (0x1D624, "M", "c"), - (0x1D625, "M", "d"), - (0x1D626, "M", "e"), - (0x1D627, "M", "f"), - (0x1D628, "M", "g"), - (0x1D629, "M", "h"), - (0x1D62A, "M", "i"), - (0x1D62B, "M", "j"), - (0x1D62C, "M", "k"), - (0x1D62D, "M", "l"), - (0x1D62E, "M", "m"), - (0x1D62F, "M", "n"), - (0x1D630, "M", "o"), - (0x1D631, "M", "p"), - (0x1D632, "M", "q"), - (0x1D633, "M", "r"), - (0x1D634, "M", "s"), - (0x1D635, "M", "t"), - (0x1D636, "M", "u"), - (0x1D637, "M", "v"), - (0x1D638, "M", "w"), - (0x1D639, "M", "x"), - (0x1D63A, "M", "y"), - (0x1D63B, "M", "z"), - (0x1D63C, "M", "a"), - (0x1D63D, "M", "b"), - (0x1D63E, "M", "c"), - (0x1D63F, "M", "d"), - (0x1D640, "M", "e"), - (0x1D641, "M", "f"), - (0x1D642, "M", "g"), - (0x1D643, "M", "h"), - (0x1D644, "M", "i"), - (0x1D645, "M", "j"), - (0x1D646, "M", "k"), - (0x1D647, "M", "l"), - (0x1D648, "M", "m"), - (0x1D649, "M", "n"), - (0x1D64A, 
"M", "o"), - (0x1D64B, "M", "p"), - (0x1D64C, "M", "q"), - (0x1D64D, "M", "r"), - (0x1D64E, "M", "s"), - (0x1D64F, "M", "t"), - (0x1D650, "M", "u"), - (0x1D651, "M", "v"), - (0x1D652, "M", "w"), - (0x1D653, "M", "x"), - (0x1D654, "M", "y"), - (0x1D655, "M", "z"), - (0x1D656, "M", "a"), - (0x1D657, "M", "b"), - (0x1D658, "M", "c"), - (0x1D659, "M", "d"), - (0x1D65A, "M", "e"), - (0x1D65B, "M", "f"), - (0x1D65C, "M", "g"), - (0x1D65D, "M", "h"), - (0x1D65E, "M", "i"), - (0x1D65F, "M", "j"), - (0x1D660, "M", "k"), - (0x1D661, "M", "l"), - (0x1D662, "M", "m"), - (0x1D663, "M", "n"), - (0x1D664, "M", "o"), - (0x1D665, "M", "p"), - (0x1D666, "M", "q"), - (0x1D667, "M", "r"), - (0x1D668, "M", "s"), - (0x1D669, "M", "t"), - (0x1D66A, "M", "u"), - (0x1D66B, "M", "v"), - (0x1D66C, "M", "w"), - (0x1D66D, "M", "x"), - (0x1D66E, "M", "y"), - (0x1D66F, "M", "z"), - (0x1D670, "M", "a"), - (0x1D671, "M", "b"), - (0x1D672, "M", "c"), - (0x1D673, "M", "d"), - (0x1D674, "M", "e"), - (0x1D675, "M", "f"), - (0x1D676, "M", "g"), - (0x1D677, "M", "h"), - (0x1D678, "M", "i"), - (0x1D679, "M", "j"), - (0x1D67A, "M", "k"), - (0x1D67B, "M", "l"), - (0x1D67C, "M", "m"), - (0x1D67D, "M", "n"), - (0x1D67E, "M", "o"), - (0x1D67F, "M", "p"), - (0x1D680, "M", "q"), - (0x1D681, "M", "r"), - (0x1D682, "M", "s"), - ] - - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D683, "M", "t"), - (0x1D684, "M", "u"), - (0x1D685, "M", "v"), - (0x1D686, "M", "w"), - (0x1D687, "M", "x"), - (0x1D688, "M", "y"), - (0x1D689, "M", "z"), - (0x1D68A, "M", "a"), - (0x1D68B, "M", "b"), - (0x1D68C, "M", "c"), - (0x1D68D, "M", "d"), - (0x1D68E, "M", "e"), - (0x1D68F, "M", "f"), - (0x1D690, "M", "g"), - (0x1D691, "M", "h"), - (0x1D692, "M", "i"), - (0x1D693, "M", "j"), - (0x1D694, "M", "k"), - (0x1D695, "M", "l"), - (0x1D696, "M", "m"), - (0x1D697, "M", "n"), - (0x1D698, "M", "o"), - (0x1D699, "M", "p"), - (0x1D69A, "M", "q"), - (0x1D69B, "M", "r"), - (0x1D69C, "M", "s"), - (0x1D69D, "M", "t"), - (0x1D69E, "M", "u"), - (0x1D69F, "M", "v"), - (0x1D6A0, "M", "w"), - (0x1D6A1, "M", "x"), - (0x1D6A2, "M", "y"), - (0x1D6A3, "M", "z"), - (0x1D6A4, "M", "ı"), - (0x1D6A5, "M", "ȷ"), - (0x1D6A6, "X"), - (0x1D6A8, "M", "α"), - (0x1D6A9, "M", "β"), - (0x1D6AA, "M", "γ"), - (0x1D6AB, "M", "δ"), - (0x1D6AC, "M", "ε"), - (0x1D6AD, "M", "ζ"), - (0x1D6AE, "M", "η"), - (0x1D6AF, "M", "θ"), - (0x1D6B0, "M", "ι"), - (0x1D6B1, "M", "κ"), - (0x1D6B2, "M", "λ"), - (0x1D6B3, "M", "μ"), - (0x1D6B4, "M", "ν"), - (0x1D6B5, "M", "ξ"), - (0x1D6B6, "M", "ο"), - (0x1D6B7, "M", "π"), - (0x1D6B8, "M", "ρ"), - (0x1D6B9, "M", "θ"), - (0x1D6BA, "M", "σ"), - (0x1D6BB, "M", "τ"), - (0x1D6BC, "M", "υ"), - (0x1D6BD, "M", "φ"), - (0x1D6BE, "M", "χ"), - (0x1D6BF, "M", "ψ"), - (0x1D6C0, "M", "ω"), - (0x1D6C1, "M", "∇"), - (0x1D6C2, "M", "α"), - (0x1D6C3, "M", "β"), - (0x1D6C4, "M", "γ"), - (0x1D6C5, "M", "δ"), - (0x1D6C6, "M", "ε"), - (0x1D6C7, "M", "ζ"), - (0x1D6C8, "M", "η"), - (0x1D6C9, "M", "θ"), - (0x1D6CA, "M", "ι"), - (0x1D6CB, "M", "κ"), - (0x1D6CC, "M", "λ"), - (0x1D6CD, "M", "μ"), - (0x1D6CE, "M", "ν"), - (0x1D6CF, "M", "ξ"), - (0x1D6D0, "M", "ο"), - (0x1D6D1, "M", "π"), - (0x1D6D2, "M", "ρ"), - (0x1D6D3, "M", "σ"), - (0x1D6D5, "M", "τ"), - (0x1D6D6, "M", "υ"), - (0x1D6D7, "M", "φ"), - (0x1D6D8, "M", "χ"), - (0x1D6D9, "M", "ψ"), - (0x1D6DA, "M", "ω"), - (0x1D6DB, "M", "∂"), - (0x1D6DC, "M", "ε"), - (0x1D6DD, "M", "θ"), - (0x1D6DE, "M", "κ"), - (0x1D6DF, "M", "φ"), - (0x1D6E0, "M", "ρ"), - (0x1D6E1, "M", "π"), - (0x1D6E2, "M", "α"), - 
(0x1D6E3, "M", "β"), - (0x1D6E4, "M", "γ"), - (0x1D6E5, "M", "δ"), - (0x1D6E6, "M", "ε"), - (0x1D6E7, "M", "ζ"), - (0x1D6E8, "M", "η"), - ] - - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D6E9, "M", "θ"), - (0x1D6EA, "M", "ι"), - (0x1D6EB, "M", "κ"), - (0x1D6EC, "M", "λ"), - (0x1D6ED, "M", "μ"), - (0x1D6EE, "M", "ν"), - (0x1D6EF, "M", "ξ"), - (0x1D6F0, "M", "ο"), - (0x1D6F1, "M", "π"), - (0x1D6F2, "M", "ρ"), - (0x1D6F3, "M", "θ"), - (0x1D6F4, "M", "σ"), - (0x1D6F5, "M", "τ"), - (0x1D6F6, "M", "υ"), - (0x1D6F7, "M", "φ"), - (0x1D6F8, "M", "χ"), - (0x1D6F9, "M", "ψ"), - (0x1D6FA, "M", "ω"), - (0x1D6FB, "M", "∇"), - (0x1D6FC, "M", "α"), - (0x1D6FD, "M", "β"), - (0x1D6FE, "M", "γ"), - (0x1D6FF, "M", "δ"), - (0x1D700, "M", "ε"), - (0x1D701, "M", "ζ"), - (0x1D702, "M", "η"), - (0x1D703, "M", "θ"), - (0x1D704, "M", "ι"), - (0x1D705, "M", "κ"), - (0x1D706, "M", "λ"), - (0x1D707, "M", "μ"), - (0x1D708, "M", "ν"), - (0x1D709, "M", "ξ"), - (0x1D70A, "M", "ο"), - (0x1D70B, "M", "π"), - (0x1D70C, "M", "ρ"), - (0x1D70D, "M", "σ"), - (0x1D70F, "M", "τ"), - (0x1D710, "M", "υ"), - (0x1D711, "M", "φ"), - (0x1D712, "M", "χ"), - (0x1D713, "M", "ψ"), - (0x1D714, "M", "ω"), - (0x1D715, "M", "∂"), - (0x1D716, "M", "ε"), - (0x1D717, "M", "θ"), - (0x1D718, "M", "κ"), - (0x1D719, "M", "φ"), - (0x1D71A, "M", "ρ"), - (0x1D71B, "M", "π"), - (0x1D71C, "M", "α"), - (0x1D71D, "M", "β"), - (0x1D71E, "M", "γ"), - (0x1D71F, "M", "δ"), - (0x1D720, "M", "ε"), - (0x1D721, "M", "ζ"), - (0x1D722, "M", "η"), - (0x1D723, "M", "θ"), - (0x1D724, "M", "ι"), - (0x1D725, "M", "κ"), - (0x1D726, "M", "λ"), - (0x1D727, "M", "μ"), - (0x1D728, "M", "ν"), - (0x1D729, "M", "ξ"), - (0x1D72A, "M", "ο"), - (0x1D72B, "M", "π"), - (0x1D72C, "M", "ρ"), - (0x1D72D, "M", "θ"), - (0x1D72E, "M", "σ"), - (0x1D72F, "M", "τ"), - (0x1D730, "M", "υ"), - (0x1D731, "M", "φ"), - (0x1D732, "M", "χ"), - (0x1D733, "M", "ψ"), - (0x1D734, "M", "ω"), - (0x1D735, "M", "∇"), - (0x1D736, "M", "α"), - (0x1D737, "M", "β"), - (0x1D738, "M", "γ"), - (0x1D739, "M", "δ"), - (0x1D73A, "M", "ε"), - (0x1D73B, "M", "ζ"), - (0x1D73C, "M", "η"), - (0x1D73D, "M", "θ"), - (0x1D73E, "M", "ι"), - (0x1D73F, "M", "κ"), - (0x1D740, "M", "λ"), - (0x1D741, "M", "μ"), - (0x1D742, "M", "ν"), - (0x1D743, "M", "ξ"), - (0x1D744, "M", "ο"), - (0x1D745, "M", "π"), - (0x1D746, "M", "ρ"), - (0x1D747, "M", "σ"), - (0x1D749, "M", "τ"), - (0x1D74A, "M", "υ"), - (0x1D74B, "M", "φ"), - (0x1D74C, "M", "χ"), - (0x1D74D, "M", "ψ"), - (0x1D74E, "M", "ω"), - ] - - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D74F, "M", "∂"), - (0x1D750, "M", "ε"), - (0x1D751, "M", "θ"), - (0x1D752, "M", "κ"), - (0x1D753, "M", "φ"), - (0x1D754, "M", "ρ"), - (0x1D755, "M", "π"), - (0x1D756, "M", "α"), - (0x1D757, "M", "β"), - (0x1D758, "M", "γ"), - (0x1D759, "M", "δ"), - (0x1D75A, "M", "ε"), - (0x1D75B, "M", "ζ"), - (0x1D75C, "M", "η"), - (0x1D75D, "M", "θ"), - (0x1D75E, "M", "ι"), - (0x1D75F, "M", "κ"), - (0x1D760, "M", "λ"), - (0x1D761, "M", "μ"), - (0x1D762, "M", "ν"), - (0x1D763, "M", "ξ"), - (0x1D764, "M", "ο"), - (0x1D765, "M", "π"), - (0x1D766, "M", "ρ"), - (0x1D767, "M", "θ"), - (0x1D768, "M", "σ"), - (0x1D769, "M", "τ"), - (0x1D76A, "M", "υ"), - (0x1D76B, "M", "φ"), - (0x1D76C, "M", "χ"), - (0x1D76D, "M", "ψ"), - (0x1D76E, "M", "ω"), - (0x1D76F, "M", "∇"), - (0x1D770, "M", "α"), - (0x1D771, "M", "β"), - (0x1D772, "M", "γ"), - (0x1D773, "M", "δ"), - (0x1D774, "M", "ε"), - (0x1D775, "M", "ζ"), - (0x1D776, "M", "η"), - (0x1D777, "M", 
"θ"), - (0x1D778, "M", "ι"), - (0x1D779, "M", "κ"), - (0x1D77A, "M", "λ"), - (0x1D77B, "M", "μ"), - (0x1D77C, "M", "ν"), - (0x1D77D, "M", "ξ"), - (0x1D77E, "M", "ο"), - (0x1D77F, "M", "π"), - (0x1D780, "M", "ρ"), - (0x1D781, "M", "σ"), - (0x1D783, "M", "τ"), - (0x1D784, "M", "υ"), - (0x1D785, "M", "φ"), - (0x1D786, "M", "χ"), - (0x1D787, "M", "ψ"), - (0x1D788, "M", "ω"), - (0x1D789, "M", "∂"), - (0x1D78A, "M", "ε"), - (0x1D78B, "M", "θ"), - (0x1D78C, "M", "κ"), - (0x1D78D, "M", "φ"), - (0x1D78E, "M", "ρ"), - (0x1D78F, "M", "π"), - (0x1D790, "M", "α"), - (0x1D791, "M", "β"), - (0x1D792, "M", "γ"), - (0x1D793, "M", "δ"), - (0x1D794, "M", "ε"), - (0x1D795, "M", "ζ"), - (0x1D796, "M", "η"), - (0x1D797, "M", "θ"), - (0x1D798, "M", "ι"), - (0x1D799, "M", "κ"), - (0x1D79A, "M", "λ"), - (0x1D79B, "M", "μ"), - (0x1D79C, "M", "ν"), - (0x1D79D, "M", "ξ"), - (0x1D79E, "M", "ο"), - (0x1D79F, "M", "π"), - (0x1D7A0, "M", "ρ"), - (0x1D7A1, "M", "θ"), - (0x1D7A2, "M", "σ"), - (0x1D7A3, "M", "τ"), - (0x1D7A4, "M", "υ"), - (0x1D7A5, "M", "φ"), - (0x1D7A6, "M", "χ"), - (0x1D7A7, "M", "ψ"), - (0x1D7A8, "M", "ω"), - (0x1D7A9, "M", "∇"), - (0x1D7AA, "M", "α"), - (0x1D7AB, "M", "β"), - (0x1D7AC, "M", "γ"), - (0x1D7AD, "M", "δ"), - (0x1D7AE, "M", "ε"), - (0x1D7AF, "M", "ζ"), - (0x1D7B0, "M", "η"), - (0x1D7B1, "M", "θ"), - (0x1D7B2, "M", "ι"), - (0x1D7B3, "M", "κ"), - ] - - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D7B4, "M", "λ"), - (0x1D7B5, "M", "μ"), - (0x1D7B6, "M", "ν"), - (0x1D7B7, "M", "ξ"), - (0x1D7B8, "M", "ο"), - (0x1D7B9, "M", "π"), - (0x1D7BA, "M", "ρ"), - (0x1D7BB, "M", "σ"), - (0x1D7BD, "M", "τ"), - (0x1D7BE, "M", "υ"), - (0x1D7BF, "M", "φ"), - (0x1D7C0, "M", "χ"), - (0x1D7C1, "M", "ψ"), - (0x1D7C2, "M", "ω"), - (0x1D7C3, "M", "∂"), - (0x1D7C4, "M", "ε"), - (0x1D7C5, "M", "θ"), - (0x1D7C6, "M", "κ"), - (0x1D7C7, "M", "φ"), - (0x1D7C8, "M", "ρ"), - (0x1D7C9, "M", "π"), - (0x1D7CA, "M", "ϝ"), - (0x1D7CC, "X"), - (0x1D7CE, "M", "0"), - (0x1D7CF, "M", "1"), - (0x1D7D0, "M", "2"), - (0x1D7D1, "M", "3"), - (0x1D7D2, "M", "4"), - (0x1D7D3, "M", "5"), - (0x1D7D4, "M", "6"), - (0x1D7D5, "M", "7"), - (0x1D7D6, "M", "8"), - (0x1D7D7, "M", "9"), - (0x1D7D8, "M", "0"), - (0x1D7D9, "M", "1"), - (0x1D7DA, "M", "2"), - (0x1D7DB, "M", "3"), - (0x1D7DC, "M", "4"), - (0x1D7DD, "M", "5"), - (0x1D7DE, "M", "6"), - (0x1D7DF, "M", "7"), - (0x1D7E0, "M", "8"), - (0x1D7E1, "M", "9"), - (0x1D7E2, "M", "0"), - (0x1D7E3, "M", "1"), - (0x1D7E4, "M", "2"), - (0x1D7E5, "M", "3"), - (0x1D7E6, "M", "4"), - (0x1D7E7, "M", "5"), - (0x1D7E8, "M", "6"), - (0x1D7E9, "M", "7"), - (0x1D7EA, "M", "8"), - (0x1D7EB, "M", "9"), - (0x1D7EC, "M", "0"), - (0x1D7ED, "M", "1"), - (0x1D7EE, "M", "2"), - (0x1D7EF, "M", "3"), - (0x1D7F0, "M", "4"), - (0x1D7F1, "M", "5"), - (0x1D7F2, "M", "6"), - (0x1D7F3, "M", "7"), - (0x1D7F4, "M", "8"), - (0x1D7F5, "M", "9"), - (0x1D7F6, "M", "0"), - (0x1D7F7, "M", "1"), - (0x1D7F8, "M", "2"), - (0x1D7F9, "M", "3"), - (0x1D7FA, "M", "4"), - (0x1D7FB, "M", "5"), - (0x1D7FC, "M", "6"), - (0x1D7FD, "M", "7"), - (0x1D7FE, "M", "8"), - (0x1D7FF, "M", "9"), - (0x1D800, "V"), - (0x1DA8C, "X"), - (0x1DA9B, "V"), - (0x1DAA0, "X"), - (0x1DAA1, "V"), - (0x1DAB0, "X"), - (0x1DF00, "V"), - (0x1DF1F, "X"), - (0x1DF25, "V"), - (0x1DF2B, "X"), - (0x1E000, "V"), - (0x1E007, "X"), - (0x1E008, "V"), - (0x1E019, "X"), - (0x1E01B, "V"), - (0x1E022, "X"), - (0x1E023, "V"), - (0x1E025, "X"), - (0x1E026, "V"), - (0x1E02B, "X"), - (0x1E030, "M", "а"), - (0x1E031, "M", "б"), - (0x1E032, "M", "в"), 
- (0x1E033, "M", "г"), - (0x1E034, "M", "д"), - (0x1E035, "M", "е"), - (0x1E036, "M", "ж"), - ] - - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E037, "M", "з"), - (0x1E038, "M", "и"), - (0x1E039, "M", "к"), - (0x1E03A, "M", "л"), - (0x1E03B, "M", "м"), - (0x1E03C, "M", "о"), - (0x1E03D, "M", "п"), - (0x1E03E, "M", "р"), - (0x1E03F, "M", "с"), - (0x1E040, "M", "т"), - (0x1E041, "M", "у"), - (0x1E042, "M", "ф"), - (0x1E043, "M", "х"), - (0x1E044, "M", "ц"), - (0x1E045, "M", "ч"), - (0x1E046, "M", "ш"), - (0x1E047, "M", "ы"), - (0x1E048, "M", "э"), - (0x1E049, "M", "ю"), - (0x1E04A, "M", "ꚉ"), - (0x1E04B, "M", "ә"), - (0x1E04C, "M", "і"), - (0x1E04D, "M", "ј"), - (0x1E04E, "M", "ө"), - (0x1E04F, "M", "ү"), - (0x1E050, "M", "ӏ"), - (0x1E051, "M", "а"), - (0x1E052, "M", "б"), - (0x1E053, "M", "в"), - (0x1E054, "M", "г"), - (0x1E055, "M", "д"), - (0x1E056, "M", "е"), - (0x1E057, "M", "ж"), - (0x1E058, "M", "з"), - (0x1E059, "M", "и"), - (0x1E05A, "M", "к"), - (0x1E05B, "M", "л"), - (0x1E05C, "M", "о"), - (0x1E05D, "M", "п"), - (0x1E05E, "M", "с"), - (0x1E05F, "M", "у"), - (0x1E060, "M", "ф"), - (0x1E061, "M", "х"), - (0x1E062, "M", "ц"), - (0x1E063, "M", "ч"), - (0x1E064, "M", "ш"), - (0x1E065, "M", "ъ"), - (0x1E066, "M", "ы"), - (0x1E067, "M", "ґ"), - (0x1E068, "M", "і"), - (0x1E069, "M", "ѕ"), - (0x1E06A, "M", "џ"), - (0x1E06B, "M", "ҫ"), - (0x1E06C, "M", "ꙑ"), - (0x1E06D, "M", "ұ"), - (0x1E06E, "X"), - (0x1E08F, "V"), - (0x1E090, "X"), - (0x1E100, "V"), - (0x1E12D, "X"), - (0x1E130, "V"), - (0x1E13E, "X"), - (0x1E140, "V"), - (0x1E14A, "X"), - (0x1E14E, "V"), - (0x1E150, "X"), - (0x1E290, "V"), - (0x1E2AF, "X"), - (0x1E2C0, "V"), - (0x1E2FA, "X"), - (0x1E2FF, "V"), - (0x1E300, "X"), - (0x1E4D0, "V"), - (0x1E4FA, "X"), - (0x1E7E0, "V"), - (0x1E7E7, "X"), - (0x1E7E8, "V"), - (0x1E7EC, "X"), - (0x1E7ED, "V"), - (0x1E7EF, "X"), - (0x1E7F0, "V"), - (0x1E7FF, "X"), - (0x1E800, "V"), - (0x1E8C5, "X"), - (0x1E8C7, "V"), - (0x1E8D7, "X"), - (0x1E900, "M", "𞤢"), - (0x1E901, "M", "𞤣"), - (0x1E902, "M", "𞤤"), - (0x1E903, "M", "𞤥"), - (0x1E904, "M", "𞤦"), - (0x1E905, "M", "𞤧"), - (0x1E906, "M", "𞤨"), - (0x1E907, "M", "𞤩"), - (0x1E908, "M", "𞤪"), - (0x1E909, "M", "𞤫"), - (0x1E90A, "M", "𞤬"), - (0x1E90B, "M", "𞤭"), - (0x1E90C, "M", "𞤮"), - (0x1E90D, "M", "𞤯"), - ] - - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E90E, "M", "𞤰"), - (0x1E90F, "M", "𞤱"), - (0x1E910, "M", "𞤲"), - (0x1E911, "M", "𞤳"), - (0x1E912, "M", "𞤴"), - (0x1E913, "M", "𞤵"), - (0x1E914, "M", "𞤶"), - (0x1E915, "M", "𞤷"), - (0x1E916, "M", "𞤸"), - (0x1E917, "M", "𞤹"), - (0x1E918, "M", "𞤺"), - (0x1E919, "M", "𞤻"), - (0x1E91A, "M", "𞤼"), - (0x1E91B, "M", "𞤽"), - (0x1E91C, "M", "𞤾"), - (0x1E91D, "M", "𞤿"), - (0x1E91E, "M", "𞥀"), - (0x1E91F, "M", "𞥁"), - (0x1E920, "M", "𞥂"), - (0x1E921, "M", "𞥃"), - (0x1E922, "V"), - (0x1E94C, "X"), - (0x1E950, "V"), - (0x1E95A, "X"), - (0x1E95E, "V"), - (0x1E960, "X"), - (0x1EC71, "V"), - (0x1ECB5, "X"), - (0x1ED01, "V"), - (0x1ED3E, "X"), - (0x1EE00, "M", "ا"), - (0x1EE01, "M", "ب"), - (0x1EE02, "M", "ج"), - (0x1EE03, "M", "د"), - (0x1EE04, "X"), - (0x1EE05, "M", "و"), - (0x1EE06, "M", "ز"), - (0x1EE07, "M", "ح"), - (0x1EE08, "M", "ط"), - (0x1EE09, "M", "ي"), - (0x1EE0A, "M", "ك"), - (0x1EE0B, "M", "ل"), - (0x1EE0C, "M", "م"), - (0x1EE0D, "M", "ن"), - (0x1EE0E, "M", "س"), - (0x1EE0F, "M", "ع"), - (0x1EE10, "M", "ف"), - (0x1EE11, "M", "ص"), - (0x1EE12, "M", "ق"), - (0x1EE13, "M", "ر"), - (0x1EE14, "M", "ش"), - (0x1EE15, "M", 
"ت"), - (0x1EE16, "M", "ث"), - (0x1EE17, "M", "خ"), - (0x1EE18, "M", "ذ"), - (0x1EE19, "M", "ض"), - (0x1EE1A, "M", "ظ"), - (0x1EE1B, "M", "غ"), - (0x1EE1C, "M", "ٮ"), - (0x1EE1D, "M", "ں"), - (0x1EE1E, "M", "ڡ"), - (0x1EE1F, "M", "ٯ"), - (0x1EE20, "X"), - (0x1EE21, "M", "ب"), - (0x1EE22, "M", "ج"), - (0x1EE23, "X"), - (0x1EE24, "M", "ه"), - (0x1EE25, "X"), - (0x1EE27, "M", "ح"), - (0x1EE28, "X"), - (0x1EE29, "M", "ي"), - (0x1EE2A, "M", "ك"), - (0x1EE2B, "M", "ل"), - (0x1EE2C, "M", "م"), - (0x1EE2D, "M", "ن"), - (0x1EE2E, "M", "س"), - (0x1EE2F, "M", "ع"), - (0x1EE30, "M", "ف"), - (0x1EE31, "M", "ص"), - (0x1EE32, "M", "ق"), - (0x1EE33, "X"), - (0x1EE34, "M", "ش"), - (0x1EE35, "M", "ت"), - (0x1EE36, "M", "ث"), - (0x1EE37, "M", "خ"), - (0x1EE38, "X"), - (0x1EE39, "M", "ض"), - (0x1EE3A, "X"), - (0x1EE3B, "M", "غ"), - (0x1EE3C, "X"), - (0x1EE42, "M", "ج"), - (0x1EE43, "X"), - (0x1EE47, "M", "ح"), - (0x1EE48, "X"), - (0x1EE49, "M", "ي"), - (0x1EE4A, "X"), - (0x1EE4B, "M", "ل"), - (0x1EE4C, "X"), - (0x1EE4D, "M", "ن"), - (0x1EE4E, "M", "س"), - ] - - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE4F, "M", "ع"), - (0x1EE50, "X"), - (0x1EE51, "M", "ص"), - (0x1EE52, "M", "ق"), - (0x1EE53, "X"), - (0x1EE54, "M", "ش"), - (0x1EE55, "X"), - (0x1EE57, "M", "خ"), - (0x1EE58, "X"), - (0x1EE59, "M", "ض"), - (0x1EE5A, "X"), - (0x1EE5B, "M", "غ"), - (0x1EE5C, "X"), - (0x1EE5D, "M", "ں"), - (0x1EE5E, "X"), - (0x1EE5F, "M", "ٯ"), - (0x1EE60, "X"), - (0x1EE61, "M", "ب"), - (0x1EE62, "M", "ج"), - (0x1EE63, "X"), - (0x1EE64, "M", "ه"), - (0x1EE65, "X"), - (0x1EE67, "M", "ح"), - (0x1EE68, "M", "ط"), - (0x1EE69, "M", "ي"), - (0x1EE6A, "M", "ك"), - (0x1EE6B, "X"), - (0x1EE6C, "M", "م"), - (0x1EE6D, "M", "ن"), - (0x1EE6E, "M", "س"), - (0x1EE6F, "M", "ع"), - (0x1EE70, "M", "ف"), - (0x1EE71, "M", "ص"), - (0x1EE72, "M", "ق"), - (0x1EE73, "X"), - (0x1EE74, "M", "ش"), - (0x1EE75, "M", "ت"), - (0x1EE76, "M", "ث"), - (0x1EE77, "M", "خ"), - (0x1EE78, "X"), - (0x1EE79, "M", "ض"), - (0x1EE7A, "M", "ظ"), - (0x1EE7B, "M", "غ"), - (0x1EE7C, "M", "ٮ"), - (0x1EE7D, "X"), - (0x1EE7E, "M", "ڡ"), - (0x1EE7F, "X"), - (0x1EE80, "M", "ا"), - (0x1EE81, "M", "ب"), - (0x1EE82, "M", "ج"), - (0x1EE83, "M", "د"), - (0x1EE84, "M", "ه"), - (0x1EE85, "M", "و"), - (0x1EE86, "M", "ز"), - (0x1EE87, "M", "ح"), - (0x1EE88, "M", "ط"), - (0x1EE89, "M", "ي"), - (0x1EE8A, "X"), - (0x1EE8B, "M", "ل"), - (0x1EE8C, "M", "م"), - (0x1EE8D, "M", "ن"), - (0x1EE8E, "M", "س"), - (0x1EE8F, "M", "ع"), - (0x1EE90, "M", "ف"), - (0x1EE91, "M", "ص"), - (0x1EE92, "M", "ق"), - (0x1EE93, "M", "ر"), - (0x1EE94, "M", "ش"), - (0x1EE95, "M", "ت"), - (0x1EE96, "M", "ث"), - (0x1EE97, "M", "خ"), - (0x1EE98, "M", "ذ"), - (0x1EE99, "M", "ض"), - (0x1EE9A, "M", "ظ"), - (0x1EE9B, "M", "غ"), - (0x1EE9C, "X"), - (0x1EEA1, "M", "ب"), - (0x1EEA2, "M", "ج"), - (0x1EEA3, "M", "د"), - (0x1EEA4, "X"), - (0x1EEA5, "M", "و"), - (0x1EEA6, "M", "ز"), - (0x1EEA7, "M", "ح"), - (0x1EEA8, "M", "ط"), - (0x1EEA9, "M", "ي"), - (0x1EEAA, "X"), - (0x1EEAB, "M", "ل"), - (0x1EEAC, "M", "م"), - (0x1EEAD, "M", "ن"), - (0x1EEAE, "M", "س"), - (0x1EEAF, "M", "ع"), - (0x1EEB0, "M", "ف"), - (0x1EEB1, "M", "ص"), - (0x1EEB2, "M", "ق"), - (0x1EEB3, "M", "ر"), - (0x1EEB4, "M", "ش"), - (0x1EEB5, "M", "ت"), - (0x1EEB6, "M", "ث"), - (0x1EEB7, "M", "خ"), - (0x1EEB8, "M", "ذ"), - ] - - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EEB9, "M", "ض"), - (0x1EEBA, "M", "ظ"), - (0x1EEBB, "M", "غ"), - (0x1EEBC, "X"), - (0x1EEF0, "V"), - (0x1EEF2, 
"X"), - (0x1F000, "V"), - (0x1F02C, "X"), - (0x1F030, "V"), - (0x1F094, "X"), - (0x1F0A0, "V"), - (0x1F0AF, "X"), - (0x1F0B1, "V"), - (0x1F0C0, "X"), - (0x1F0C1, "V"), - (0x1F0D0, "X"), - (0x1F0D1, "V"), - (0x1F0F6, "X"), - (0x1F101, "3", "0,"), - (0x1F102, "3", "1,"), - (0x1F103, "3", "2,"), - (0x1F104, "3", "3,"), - (0x1F105, "3", "4,"), - (0x1F106, "3", "5,"), - (0x1F107, "3", "6,"), - (0x1F108, "3", "7,"), - (0x1F109, "3", "8,"), - (0x1F10A, "3", "9,"), - (0x1F10B, "V"), - (0x1F110, "3", "(a)"), - (0x1F111, "3", "(b)"), - (0x1F112, "3", "(c)"), - (0x1F113, "3", "(d)"), - (0x1F114, "3", "(e)"), - (0x1F115, "3", "(f)"), - (0x1F116, "3", "(g)"), - (0x1F117, "3", "(h)"), - (0x1F118, "3", "(i)"), - (0x1F119, "3", "(j)"), - (0x1F11A, "3", "(k)"), - (0x1F11B, "3", "(l)"), - (0x1F11C, "3", "(m)"), - (0x1F11D, "3", "(n)"), - (0x1F11E, "3", "(o)"), - (0x1F11F, "3", "(p)"), - (0x1F120, "3", "(q)"), - (0x1F121, "3", "(r)"), - (0x1F122, "3", "(s)"), - (0x1F123, "3", "(t)"), - (0x1F124, "3", "(u)"), - (0x1F125, "3", "(v)"), - (0x1F126, "3", "(w)"), - (0x1F127, "3", "(x)"), - (0x1F128, "3", "(y)"), - (0x1F129, "3", "(z)"), - (0x1F12A, "M", "〔s〕"), - (0x1F12B, "M", "c"), - (0x1F12C, "M", "r"), - (0x1F12D, "M", "cd"), - (0x1F12E, "M", "wz"), - (0x1F12F, "V"), - (0x1F130, "M", "a"), - (0x1F131, "M", "b"), - (0x1F132, "M", "c"), - (0x1F133, "M", "d"), - (0x1F134, "M", "e"), - (0x1F135, "M", "f"), - (0x1F136, "M", "g"), - (0x1F137, "M", "h"), - (0x1F138, "M", "i"), - (0x1F139, "M", "j"), - (0x1F13A, "M", "k"), - (0x1F13B, "M", "l"), - (0x1F13C, "M", "m"), - (0x1F13D, "M", "n"), - (0x1F13E, "M", "o"), - (0x1F13F, "M", "p"), - (0x1F140, "M", "q"), - (0x1F141, "M", "r"), - (0x1F142, "M", "s"), - (0x1F143, "M", "t"), - (0x1F144, "M", "u"), - (0x1F145, "M", "v"), - (0x1F146, "M", "w"), - (0x1F147, "M", "x"), - (0x1F148, "M", "y"), - (0x1F149, "M", "z"), - (0x1F14A, "M", "hv"), - (0x1F14B, "M", "mv"), - (0x1F14C, "M", "sd"), - (0x1F14D, "M", "ss"), - (0x1F14E, "M", "ppv"), - (0x1F14F, "M", "wc"), - (0x1F150, "V"), - (0x1F16A, "M", "mc"), - (0x1F16B, "M", "md"), - (0x1F16C, "M", "mr"), - (0x1F16D, "V"), - (0x1F190, "M", "dj"), - (0x1F191, "V"), - ] - - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F1AE, "X"), - (0x1F1E6, "V"), - (0x1F200, "M", "ほか"), - (0x1F201, "M", "ココ"), - (0x1F202, "M", "サ"), - (0x1F203, "X"), - (0x1F210, "M", "手"), - (0x1F211, "M", "字"), - (0x1F212, "M", "双"), - (0x1F213, "M", "デ"), - (0x1F214, "M", "二"), - (0x1F215, "M", "多"), - (0x1F216, "M", "解"), - (0x1F217, "M", "天"), - (0x1F218, "M", "交"), - (0x1F219, "M", "映"), - (0x1F21A, "M", "無"), - (0x1F21B, "M", "料"), - (0x1F21C, "M", "前"), - (0x1F21D, "M", "後"), - (0x1F21E, "M", "再"), - (0x1F21F, "M", "新"), - (0x1F220, "M", "初"), - (0x1F221, "M", "終"), - (0x1F222, "M", "生"), - (0x1F223, "M", "販"), - (0x1F224, "M", "声"), - (0x1F225, "M", "吹"), - (0x1F226, "M", "演"), - (0x1F227, "M", "投"), - (0x1F228, "M", "捕"), - (0x1F229, "M", "一"), - (0x1F22A, "M", "三"), - (0x1F22B, "M", "遊"), - (0x1F22C, "M", "左"), - (0x1F22D, "M", "中"), - (0x1F22E, "M", "右"), - (0x1F22F, "M", "指"), - (0x1F230, "M", "走"), - (0x1F231, "M", "打"), - (0x1F232, "M", "禁"), - (0x1F233, "M", "空"), - (0x1F234, "M", "合"), - (0x1F235, "M", "満"), - (0x1F236, "M", "有"), - (0x1F237, "M", "月"), - (0x1F238, "M", "申"), - (0x1F239, "M", "割"), - (0x1F23A, "M", "営"), - (0x1F23B, "M", "配"), - (0x1F23C, "X"), - (0x1F240, "M", "〔本〕"), - (0x1F241, "M", "〔三〕"), - (0x1F242, "M", "〔二〕"), - (0x1F243, "M", "〔安〕"), - (0x1F244, "M", "〔点〕"), - (0x1F245, "M", "〔打〕"), 
- (0x1F246, "M", "〔盗〕"), - (0x1F247, "M", "〔勝〕"), - (0x1F248, "M", "〔敗〕"), - (0x1F249, "X"), - (0x1F250, "M", "得"), - (0x1F251, "M", "可"), - (0x1F252, "X"), - (0x1F260, "V"), - (0x1F266, "X"), - (0x1F300, "V"), - (0x1F6D8, "X"), - (0x1F6DC, "V"), - (0x1F6ED, "X"), - (0x1F6F0, "V"), - (0x1F6FD, "X"), - (0x1F700, "V"), - (0x1F777, "X"), - (0x1F77B, "V"), - (0x1F7DA, "X"), - (0x1F7E0, "V"), - (0x1F7EC, "X"), - (0x1F7F0, "V"), - (0x1F7F1, "X"), - (0x1F800, "V"), - (0x1F80C, "X"), - (0x1F810, "V"), - (0x1F848, "X"), - (0x1F850, "V"), - (0x1F85A, "X"), - (0x1F860, "V"), - (0x1F888, "X"), - (0x1F890, "V"), - (0x1F8AE, "X"), - (0x1F8B0, "V"), - (0x1F8B2, "X"), - (0x1F900, "V"), - (0x1FA54, "X"), - (0x1FA60, "V"), - (0x1FA6E, "X"), - (0x1FA70, "V"), - (0x1FA7D, "X"), - (0x1FA80, "V"), - (0x1FA89, "X"), - ] - - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FA90, "V"), - (0x1FABE, "X"), - (0x1FABF, "V"), - (0x1FAC6, "X"), - (0x1FACE, "V"), - (0x1FADC, "X"), - (0x1FAE0, "V"), - (0x1FAE9, "X"), - (0x1FAF0, "V"), - (0x1FAF9, "X"), - (0x1FB00, "V"), - (0x1FB93, "X"), - (0x1FB94, "V"), - (0x1FBCB, "X"), - (0x1FBF0, "M", "0"), - (0x1FBF1, "M", "1"), - (0x1FBF2, "M", "2"), - (0x1FBF3, "M", "3"), - (0x1FBF4, "M", "4"), - (0x1FBF5, "M", "5"), - (0x1FBF6, "M", "6"), - (0x1FBF7, "M", "7"), - (0x1FBF8, "M", "8"), - (0x1FBF9, "M", "9"), - (0x1FBFA, "X"), - (0x20000, "V"), - (0x2A6E0, "X"), - (0x2A700, "V"), - (0x2B73A, "X"), - (0x2B740, "V"), - (0x2B81E, "X"), - (0x2B820, "V"), - (0x2CEA2, "X"), - (0x2CEB0, "V"), - (0x2EBE1, "X"), - (0x2EBF0, "V"), - (0x2EE5E, "X"), - (0x2F800, "M", "丽"), - (0x2F801, "M", "丸"), - (0x2F802, "M", "乁"), - (0x2F803, "M", "𠄢"), - (0x2F804, "M", "你"), - (0x2F805, "M", "侮"), - (0x2F806, "M", "侻"), - (0x2F807, "M", "倂"), - (0x2F808, "M", "偺"), - (0x2F809, "M", "備"), - (0x2F80A, "M", "僧"), - (0x2F80B, "M", "像"), - (0x2F80C, "M", "㒞"), - (0x2F80D, "M", "𠘺"), - (0x2F80E, "M", "免"), - (0x2F80F, "M", "兔"), - (0x2F810, "M", "兤"), - (0x2F811, "M", "具"), - (0x2F812, "M", "𠔜"), - (0x2F813, "M", "㒹"), - (0x2F814, "M", "內"), - (0x2F815, "M", "再"), - (0x2F816, "M", "𠕋"), - (0x2F817, "M", "冗"), - (0x2F818, "M", "冤"), - (0x2F819, "M", "仌"), - (0x2F81A, "M", "冬"), - (0x2F81B, "M", "况"), - (0x2F81C, "M", "𩇟"), - (0x2F81D, "M", "凵"), - (0x2F81E, "M", "刃"), - (0x2F81F, "M", "㓟"), - (0x2F820, "M", "刻"), - (0x2F821, "M", "剆"), - (0x2F822, "M", "割"), - (0x2F823, "M", "剷"), - (0x2F824, "M", "㔕"), - (0x2F825, "M", "勇"), - (0x2F826, "M", "勉"), - (0x2F827, "M", "勤"), - (0x2F828, "M", "勺"), - (0x2F829, "M", "包"), - (0x2F82A, "M", "匆"), - (0x2F82B, "M", "北"), - (0x2F82C, "M", "卉"), - (0x2F82D, "M", "卑"), - (0x2F82E, "M", "博"), - (0x2F82F, "M", "即"), - (0x2F830, "M", "卽"), - (0x2F831, "M", "卿"), - (0x2F834, "M", "𠨬"), - (0x2F835, "M", "灰"), - (0x2F836, "M", "及"), - (0x2F837, "M", "叟"), - (0x2F838, "M", "𠭣"), - (0x2F839, "M", "叫"), - (0x2F83A, "M", "叱"), - (0x2F83B, "M", "吆"), - (0x2F83C, "M", "咞"), - (0x2F83D, "M", "吸"), - (0x2F83E, "M", "呈"), - (0x2F83F, "M", "周"), - (0x2F840, "M", "咢"), - ] - - -def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F841, "M", "哶"), - (0x2F842, "M", "唐"), - (0x2F843, "M", "啓"), - (0x2F844, "M", "啣"), - (0x2F845, "M", "善"), - (0x2F847, "M", "喙"), - (0x2F848, "M", "喫"), - (0x2F849, "M", "喳"), - (0x2F84A, "M", "嗂"), - (0x2F84B, "M", "圖"), - (0x2F84C, "M", "嘆"), - (0x2F84D, "M", "圗"), - (0x2F84E, "M", "噑"), - (0x2F84F, "M", "噴"), - (0x2F850, "M", "切"), - (0x2F851, "M", "壮"), - (0x2F852, "M", "城"), - (0x2F853, 
"M", "埴"), - (0x2F854, "M", "堍"), - (0x2F855, "M", "型"), - (0x2F856, "M", "堲"), - (0x2F857, "M", "報"), - (0x2F858, "M", "墬"), - (0x2F859, "M", "𡓤"), - (0x2F85A, "M", "売"), - (0x2F85B, "M", "壷"), - (0x2F85C, "M", "夆"), - (0x2F85D, "M", "多"), - (0x2F85E, "M", "夢"), - (0x2F85F, "M", "奢"), - (0x2F860, "M", "𡚨"), - (0x2F861, "M", "𡛪"), - (0x2F862, "M", "姬"), - (0x2F863, "M", "娛"), - (0x2F864, "M", "娧"), - (0x2F865, "M", "姘"), - (0x2F866, "M", "婦"), - (0x2F867, "M", "㛮"), - (0x2F868, "X"), - (0x2F869, "M", "嬈"), - (0x2F86A, "M", "嬾"), - (0x2F86C, "M", "𡧈"), - (0x2F86D, "M", "寃"), - (0x2F86E, "M", "寘"), - (0x2F86F, "M", "寧"), - (0x2F870, "M", "寳"), - (0x2F871, "M", "𡬘"), - (0x2F872, "M", "寿"), - (0x2F873, "M", "将"), - (0x2F874, "X"), - (0x2F875, "M", "尢"), - (0x2F876, "M", "㞁"), - (0x2F877, "M", "屠"), - (0x2F878, "M", "屮"), - (0x2F879, "M", "峀"), - (0x2F87A, "M", "岍"), - (0x2F87B, "M", "𡷤"), - (0x2F87C, "M", "嵃"), - (0x2F87D, "M", "𡷦"), - (0x2F87E, "M", "嵮"), - (0x2F87F, "M", "嵫"), - (0x2F880, "M", "嵼"), - (0x2F881, "M", "巡"), - (0x2F882, "M", "巢"), - (0x2F883, "M", "㠯"), - (0x2F884, "M", "巽"), - (0x2F885, "M", "帨"), - (0x2F886, "M", "帽"), - (0x2F887, "M", "幩"), - (0x2F888, "M", "㡢"), - (0x2F889, "M", "𢆃"), - (0x2F88A, "M", "㡼"), - (0x2F88B, "M", "庰"), - (0x2F88C, "M", "庳"), - (0x2F88D, "M", "庶"), - (0x2F88E, "M", "廊"), - (0x2F88F, "M", "𪎒"), - (0x2F890, "M", "廾"), - (0x2F891, "M", "𢌱"), - (0x2F893, "M", "舁"), - (0x2F894, "M", "弢"), - (0x2F896, "M", "㣇"), - (0x2F897, "M", "𣊸"), - (0x2F898, "M", "𦇚"), - (0x2F899, "M", "形"), - (0x2F89A, "M", "彫"), - (0x2F89B, "M", "㣣"), - (0x2F89C, "M", "徚"), - (0x2F89D, "M", "忍"), - (0x2F89E, "M", "志"), - (0x2F89F, "M", "忹"), - (0x2F8A0, "M", "悁"), - (0x2F8A1, "M", "㤺"), - (0x2F8A2, "M", "㤜"), - (0x2F8A3, "M", "悔"), - (0x2F8A4, "M", "𢛔"), - (0x2F8A5, "M", "惇"), - (0x2F8A6, "M", "慈"), - (0x2F8A7, "M", "慌"), - (0x2F8A8, "M", "慎"), - ] - - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F8A9, "M", "慌"), - (0x2F8AA, "M", "慺"), - (0x2F8AB, "M", "憎"), - (0x2F8AC, "M", "憲"), - (0x2F8AD, "M", "憤"), - (0x2F8AE, "M", "憯"), - (0x2F8AF, "M", "懞"), - (0x2F8B0, "M", "懲"), - (0x2F8B1, "M", "懶"), - (0x2F8B2, "M", "成"), - (0x2F8B3, "M", "戛"), - (0x2F8B4, "M", "扝"), - (0x2F8B5, "M", "抱"), - (0x2F8B6, "M", "拔"), - (0x2F8B7, "M", "捐"), - (0x2F8B8, "M", "𢬌"), - (0x2F8B9, "M", "挽"), - (0x2F8BA, "M", "拼"), - (0x2F8BB, "M", "捨"), - (0x2F8BC, "M", "掃"), - (0x2F8BD, "M", "揤"), - (0x2F8BE, "M", "𢯱"), - (0x2F8BF, "M", "搢"), - (0x2F8C0, "M", "揅"), - (0x2F8C1, "M", "掩"), - (0x2F8C2, "M", "㨮"), - (0x2F8C3, "M", "摩"), - (0x2F8C4, "M", "摾"), - (0x2F8C5, "M", "撝"), - (0x2F8C6, "M", "摷"), - (0x2F8C7, "M", "㩬"), - (0x2F8C8, "M", "敏"), - (0x2F8C9, "M", "敬"), - (0x2F8CA, "M", "𣀊"), - (0x2F8CB, "M", "旣"), - (0x2F8CC, "M", "書"), - (0x2F8CD, "M", "晉"), - (0x2F8CE, "M", "㬙"), - (0x2F8CF, "M", "暑"), - (0x2F8D0, "M", "㬈"), - (0x2F8D1, "M", "㫤"), - (0x2F8D2, "M", "冒"), - (0x2F8D3, "M", "冕"), - (0x2F8D4, "M", "最"), - (0x2F8D5, "M", "暜"), - (0x2F8D6, "M", "肭"), - (0x2F8D7, "M", "䏙"), - (0x2F8D8, "M", "朗"), - (0x2F8D9, "M", "望"), - (0x2F8DA, "M", "朡"), - (0x2F8DB, "M", "杞"), - (0x2F8DC, "M", "杓"), - (0x2F8DD, "M", "𣏃"), - (0x2F8DE, "M", "㭉"), - (0x2F8DF, "M", "柺"), - (0x2F8E0, "M", "枅"), - (0x2F8E1, "M", "桒"), - (0x2F8E2, "M", "梅"), - (0x2F8E3, "M", "𣑭"), - (0x2F8E4, "M", "梎"), - (0x2F8E5, "M", "栟"), - (0x2F8E6, "M", "椔"), - (0x2F8E7, "M", "㮝"), - (0x2F8E8, "M", "楂"), - (0x2F8E9, "M", "榣"), - (0x2F8EA, "M", "槪"), - (0x2F8EB, "M", "檨"), - (0x2F8EC, "M", "𣚣"), - (0x2F8ED, 
"M", "櫛"), - (0x2F8EE, "M", "㰘"), - (0x2F8EF, "M", "次"), - (0x2F8F0, "M", "𣢧"), - (0x2F8F1, "M", "歔"), - (0x2F8F2, "M", "㱎"), - (0x2F8F3, "M", "歲"), - (0x2F8F4, "M", "殟"), - (0x2F8F5, "M", "殺"), - (0x2F8F6, "M", "殻"), - (0x2F8F7, "M", "𣪍"), - (0x2F8F8, "M", "𡴋"), - (0x2F8F9, "M", "𣫺"), - (0x2F8FA, "M", "汎"), - (0x2F8FB, "M", "𣲼"), - (0x2F8FC, "M", "沿"), - (0x2F8FD, "M", "泍"), - (0x2F8FE, "M", "汧"), - (0x2F8FF, "M", "洖"), - (0x2F900, "M", "派"), - (0x2F901, "M", "海"), - (0x2F902, "M", "流"), - (0x2F903, "M", "浩"), - (0x2F904, "M", "浸"), - (0x2F905, "M", "涅"), - (0x2F906, "M", "𣴞"), - (0x2F907, "M", "洴"), - (0x2F908, "M", "港"), - (0x2F909, "M", "湮"), - (0x2F90A, "M", "㴳"), - (0x2F90B, "M", "滋"), - (0x2F90C, "M", "滇"), - ] - - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F90D, "M", "𣻑"), - (0x2F90E, "M", "淹"), - (0x2F90F, "M", "潮"), - (0x2F910, "M", "𣽞"), - (0x2F911, "M", "𣾎"), - (0x2F912, "M", "濆"), - (0x2F913, "M", "瀹"), - (0x2F914, "M", "瀞"), - (0x2F915, "M", "瀛"), - (0x2F916, "M", "㶖"), - (0x2F917, "M", "灊"), - (0x2F918, "M", "災"), - (0x2F919, "M", "灷"), - (0x2F91A, "M", "炭"), - (0x2F91B, "M", "𠔥"), - (0x2F91C, "M", "煅"), - (0x2F91D, "M", "𤉣"), - (0x2F91E, "M", "熜"), - (0x2F91F, "X"), - (0x2F920, "M", "爨"), - (0x2F921, "M", "爵"), - (0x2F922, "M", "牐"), - (0x2F923, "M", "𤘈"), - (0x2F924, "M", "犀"), - (0x2F925, "M", "犕"), - (0x2F926, "M", "𤜵"), - (0x2F927, "M", "𤠔"), - (0x2F928, "M", "獺"), - (0x2F929, "M", "王"), - (0x2F92A, "M", "㺬"), - (0x2F92B, "M", "玥"), - (0x2F92C, "M", "㺸"), - (0x2F92E, "M", "瑇"), - (0x2F92F, "M", "瑜"), - (0x2F930, "M", "瑱"), - (0x2F931, "M", "璅"), - (0x2F932, "M", "瓊"), - (0x2F933, "M", "㼛"), - (0x2F934, "M", "甤"), - (0x2F935, "M", "𤰶"), - (0x2F936, "M", "甾"), - (0x2F937, "M", "𤲒"), - (0x2F938, "M", "異"), - (0x2F939, "M", "𢆟"), - (0x2F93A, "M", "瘐"), - (0x2F93B, "M", "𤾡"), - (0x2F93C, "M", "𤾸"), - (0x2F93D, "M", "𥁄"), - (0x2F93E, "M", "㿼"), - (0x2F93F, "M", "䀈"), - (0x2F940, "M", "直"), - (0x2F941, "M", "𥃳"), - (0x2F942, "M", "𥃲"), - (0x2F943, "M", "𥄙"), - (0x2F944, "M", "𥄳"), - (0x2F945, "M", "眞"), - (0x2F946, "M", "真"), - (0x2F948, "M", "睊"), - (0x2F949, "M", "䀹"), - (0x2F94A, "M", "瞋"), - (0x2F94B, "M", "䁆"), - (0x2F94C, "M", "䂖"), - (0x2F94D, "M", "𥐝"), - (0x2F94E, "M", "硎"), - (0x2F94F, "M", "碌"), - (0x2F950, "M", "磌"), - (0x2F951, "M", "䃣"), - (0x2F952, "M", "𥘦"), - (0x2F953, "M", "祖"), - (0x2F954, "M", "𥚚"), - (0x2F955, "M", "𥛅"), - (0x2F956, "M", "福"), - (0x2F957, "M", "秫"), - (0x2F958, "M", "䄯"), - (0x2F959, "M", "穀"), - (0x2F95A, "M", "穊"), - (0x2F95B, "M", "穏"), - (0x2F95C, "M", "𥥼"), - (0x2F95D, "M", "𥪧"), - (0x2F95F, "X"), - (0x2F960, "M", "䈂"), - (0x2F961, "M", "𥮫"), - (0x2F962, "M", "篆"), - (0x2F963, "M", "築"), - (0x2F964, "M", "䈧"), - (0x2F965, "M", "𥲀"), - (0x2F966, "M", "糒"), - (0x2F967, "M", "䊠"), - (0x2F968, "M", "糨"), - (0x2F969, "M", "糣"), - (0x2F96A, "M", "紀"), - (0x2F96B, "M", "𥾆"), - (0x2F96C, "M", "絣"), - (0x2F96D, "M", "䌁"), - (0x2F96E, "M", "緇"), - (0x2F96F, "M", "縂"), - (0x2F970, "M", "繅"), - (0x2F971, "M", "䌴"), - (0x2F972, "M", "𦈨"), - (0x2F973, "M", "𦉇"), - ] - - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F974, "M", "䍙"), - (0x2F975, "M", "𦋙"), - (0x2F976, "M", "罺"), - (0x2F977, "M", "𦌾"), - (0x2F978, "M", "羕"), - (0x2F979, "M", "翺"), - (0x2F97A, "M", "者"), - (0x2F97B, "M", "𦓚"), - (0x2F97C, "M", "𦔣"), - (0x2F97D, "M", "聠"), - (0x2F97E, "M", "𦖨"), - (0x2F97F, "M", "聰"), - (0x2F980, "M", "𣍟"), - (0x2F981, "M", "䏕"), - (0x2F982, "M", "育"), - (0x2F983, "M", 
"脃"), - (0x2F984, "M", "䐋"), - (0x2F985, "M", "脾"), - (0x2F986, "M", "媵"), - (0x2F987, "M", "𦞧"), - (0x2F988, "M", "𦞵"), - (0x2F989, "M", "𣎓"), - (0x2F98A, "M", "𣎜"), - (0x2F98B, "M", "舁"), - (0x2F98C, "M", "舄"), - (0x2F98D, "M", "辞"), - (0x2F98E, "M", "䑫"), - (0x2F98F, "M", "芑"), - (0x2F990, "M", "芋"), - (0x2F991, "M", "芝"), - (0x2F992, "M", "劳"), - (0x2F993, "M", "花"), - (0x2F994, "M", "芳"), - (0x2F995, "M", "芽"), - (0x2F996, "M", "苦"), - (0x2F997, "M", "𦬼"), - (0x2F998, "M", "若"), - (0x2F999, "M", "茝"), - (0x2F99A, "M", "荣"), - (0x2F99B, "M", "莭"), - (0x2F99C, "M", "茣"), - (0x2F99D, "M", "莽"), - (0x2F99E, "M", "菧"), - (0x2F99F, "M", "著"), - (0x2F9A0, "M", "荓"), - (0x2F9A1, "M", "菊"), - (0x2F9A2, "M", "菌"), - (0x2F9A3, "M", "菜"), - (0x2F9A4, "M", "𦰶"), - (0x2F9A5, "M", "𦵫"), - (0x2F9A6, "M", "𦳕"), - (0x2F9A7, "M", "䔫"), - (0x2F9A8, "M", "蓱"), - (0x2F9A9, "M", "蓳"), - (0x2F9AA, "M", "蔖"), - (0x2F9AB, "M", "𧏊"), - (0x2F9AC, "M", "蕤"), - (0x2F9AD, "M", "𦼬"), - (0x2F9AE, "M", "䕝"), - (0x2F9AF, "M", "䕡"), - (0x2F9B0, "M", "𦾱"), - (0x2F9B1, "M", "𧃒"), - (0x2F9B2, "M", "䕫"), - (0x2F9B3, "M", "虐"), - (0x2F9B4, "M", "虜"), - (0x2F9B5, "M", "虧"), - (0x2F9B6, "M", "虩"), - (0x2F9B7, "M", "蚩"), - (0x2F9B8, "M", "蚈"), - (0x2F9B9, "M", "蜎"), - (0x2F9BA, "M", "蛢"), - (0x2F9BB, "M", "蝹"), - (0x2F9BC, "M", "蜨"), - (0x2F9BD, "M", "蝫"), - (0x2F9BE, "M", "螆"), - (0x2F9BF, "X"), - (0x2F9C0, "M", "蟡"), - (0x2F9C1, "M", "蠁"), - (0x2F9C2, "M", "䗹"), - (0x2F9C3, "M", "衠"), - (0x2F9C4, "M", "衣"), - (0x2F9C5, "M", "𧙧"), - (0x2F9C6, "M", "裗"), - (0x2F9C7, "M", "裞"), - (0x2F9C8, "M", "䘵"), - (0x2F9C9, "M", "裺"), - (0x2F9CA, "M", "㒻"), - (0x2F9CB, "M", "𧢮"), - (0x2F9CC, "M", "𧥦"), - (0x2F9CD, "M", "䚾"), - (0x2F9CE, "M", "䛇"), - (0x2F9CF, "M", "誠"), - (0x2F9D0, "M", "諭"), - (0x2F9D1, "M", "變"), - (0x2F9D2, "M", "豕"), - (0x2F9D3, "M", "𧲨"), - (0x2F9D4, "M", "貫"), - (0x2F9D5, "M", "賁"), - (0x2F9D6, "M", "贛"), - (0x2F9D7, "M", "起"), - ] - - -def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F9D8, "M", "𧼯"), - (0x2F9D9, "M", "𠠄"), - (0x2F9DA, "M", "跋"), - (0x2F9DB, "M", "趼"), - (0x2F9DC, "M", "跰"), - (0x2F9DD, "M", "𠣞"), - (0x2F9DE, "M", "軔"), - (0x2F9DF, "M", "輸"), - (0x2F9E0, "M", "𨗒"), - (0x2F9E1, "M", "𨗭"), - (0x2F9E2, "M", "邔"), - (0x2F9E3, "M", "郱"), - (0x2F9E4, "M", "鄑"), - (0x2F9E5, "M", "𨜮"), - (0x2F9E6, "M", "鄛"), - (0x2F9E7, "M", "鈸"), - (0x2F9E8, "M", "鋗"), - (0x2F9E9, "M", "鋘"), - (0x2F9EA, "M", "鉼"), - (0x2F9EB, "M", "鏹"), - (0x2F9EC, "M", "鐕"), - (0x2F9ED, "M", "𨯺"), - (0x2F9EE, "M", "開"), - (0x2F9EF, "M", "䦕"), - (0x2F9F0, "M", "閷"), - (0x2F9F1, "M", "𨵷"), - (0x2F9F2, "M", "䧦"), - (0x2F9F3, "M", "雃"), - (0x2F9F4, "M", "嶲"), - (0x2F9F5, "M", "霣"), - (0x2F9F6, "M", "𩅅"), - (0x2F9F7, "M", "𩈚"), - (0x2F9F8, "M", "䩮"), - (0x2F9F9, "M", "䩶"), - (0x2F9FA, "M", "韠"), - (0x2F9FB, "M", "𩐊"), - (0x2F9FC, "M", "䪲"), - (0x2F9FD, "M", "𩒖"), - (0x2F9FE, "M", "頋"), - (0x2FA00, "M", "頩"), - (0x2FA01, "M", "𩖶"), - (0x2FA02, "M", "飢"), - (0x2FA03, "M", "䬳"), - (0x2FA04, "M", "餩"), - (0x2FA05, "M", "馧"), - (0x2FA06, "M", "駂"), - (0x2FA07, "M", "駾"), - (0x2FA08, "M", "䯎"), - (0x2FA09, "M", "𩬰"), - (0x2FA0A, "M", "鬒"), - (0x2FA0B, "M", "鱀"), - (0x2FA0C, "M", "鳽"), - (0x2FA0D, "M", "䳎"), - (0x2FA0E, "M", "䳭"), - (0x2FA0F, "M", "鵧"), - (0x2FA10, "M", "𪃎"), - (0x2FA11, "M", "䳸"), - (0x2FA12, "M", "𪄅"), - (0x2FA13, "M", "𪈎"), - (0x2FA14, "M", "𪊑"), - (0x2FA15, "M", "麻"), - (0x2FA16, "M", "䵖"), - (0x2FA17, "M", "黹"), - (0x2FA18, "M", "黾"), - (0x2FA19, "M", "鼅"), - (0x2FA1A, "M", "鼏"), - (0x2FA1B, 
"M", "鼖"), - (0x2FA1C, "M", "鼻"), - (0x2FA1D, "M", "𪘀"), - (0x2FA1E, "X"), - (0x30000, "V"), - (0x3134B, "X"), - (0x31350, "V"), - (0x323B0, "X"), - (0xE0100, "I"), - (0xE01F0, "X"), - ] - - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() - + _seg_79() - + _seg_80() - + _seg_81() -) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/lib/python3.12/site-packages/jose/__init__.py b/venv/lib/python3.12/site-packages/jose/__init__.py deleted file mode 100644 index 7e53b60c..00000000 --- a/venv/lib/python3.12/site-packages/jose/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -__version__ = "3.5.0" -__author__ = "Michael Davis" -__license__ = "MIT" -__copyright__ = "Copyright 2016 Michael Davis" - - -from .exceptions import ExpiredSignatureError # noqa: F401 -from .exceptions import JOSEError # noqa: F401 -from .exceptions import JWSError # noqa: F401 -from .exceptions import JWTError # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index c9351f2a..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc deleted file mode 100644 index 9a5b3f69..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/constants.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc deleted file mode 100644 index cb9163cd..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/exceptions.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc deleted file mode 100644 index d21db8f0..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/jwe.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc deleted file mode 100644 index 147cfe7d..00000000 Binary files 
a/venv/lib/python3.12/site-packages/jose/__pycache__/jwk.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc deleted file mode 100644 index 472388ab..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/jws.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc deleted file mode 100644 index 6954f8c8..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/jwt.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc deleted file mode 100644 index 1cbf101f..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/__pycache__/utils.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__init__.py b/venv/lib/python3.12/site-packages/jose/backends/__init__.py deleted file mode 100644 index 99189691..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -from jose.backends.native import get_random_bytes # noqa: F401 - -try: - from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401 -except ImportError: - try: - from jose.backends.rsa_backend import RSAKey # noqa: F401 - except ImportError: - RSAKey = None - -try: - from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401 -except ImportError: - from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401 - -try: - from jose.backends.cryptography_backend import CryptographyAESKey as AESKey # noqa: F401 -except ImportError: - AESKey = None - -try: - from jose.backends.cryptography_backend import CryptographyHMACKey as HMACKey # noqa: F401 -except ImportError: - from jose.backends.native import HMACKey # noqa: F401 - -from .base import DIRKey # noqa: F401 diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 1ed0059b..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc deleted file mode 100644 index b437d53e..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/_asn1.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc deleted file mode 100644 index 8cc08972..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/base.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc deleted file mode 100644 index 6081059a..00000000 Binary files 
a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/cryptography_backend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc deleted file mode 100644 index 81582566..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/ecdsa_backend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc deleted file mode 100644 index 0fff5c58..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/native.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc b/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc deleted file mode 100644 index 7d470a43..00000000 Binary files a/venv/lib/python3.12/site-packages/jose/backends/__pycache__/rsa_backend.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/jose/backends/_asn1.py b/venv/lib/python3.12/site-packages/jose/backends/_asn1.py deleted file mode 100644 index 87e3df1b..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/_asn1.py +++ /dev/null @@ -1,84 +0,0 @@ -"""ASN1 encoding helpers for converting between PKCS1 and PKCS8. - -Required by rsa_backend but not cryptography_backend. -""" - -from pyasn1.codec.der import decoder, encoder -from pyasn1.type import namedtype, univ - -RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" - - -class RsaAlgorithmIdentifier(univ.Sequence): - """ASN1 structure for recording RSA PrivateKeyAlgorithm identifiers.""" - - componentType = namedtype.NamedTypes( - namedtype.NamedType("rsaEncryption", univ.ObjectIdentifier()), namedtype.NamedType("parameters", univ.Null()) - ) - - -class PKCS8PrivateKey(univ.Sequence): - """ASN1 structure for recording PKCS8 private keys.""" - - componentType = namedtype.NamedTypes( - namedtype.NamedType("version", univ.Integer()), - namedtype.NamedType("privateKeyAlgorithm", RsaAlgorithmIdentifier()), - namedtype.NamedType("privateKey", univ.OctetString()), - ) - - -class PublicKeyInfo(univ.Sequence): - """ASN1 structure for recording PKCS8 public keys.""" - - componentType = namedtype.NamedTypes( - namedtype.NamedType("algorithm", RsaAlgorithmIdentifier()), namedtype.NamedType("publicKey", univ.BitString()) - ) - - -def rsa_private_key_pkcs8_to_pkcs1(pkcs8_key): - """Convert a PKCS8-encoded RSA private key to PKCS1.""" - decoded_values = decoder.decode(pkcs8_key, asn1Spec=PKCS8PrivateKey()) - - try: - decoded_key = decoded_values[0] - except IndexError: - raise ValueError("Invalid private key encoding") - - return decoded_key["privateKey"] - - -def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key): - """Convert a PKCS1-encoded RSA private key to PKCS8.""" - algorithm = RsaAlgorithmIdentifier() - algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID - - pkcs8_key = PKCS8PrivateKey() - pkcs8_key["version"] = 0 - pkcs8_key["privateKeyAlgorithm"] = algorithm - pkcs8_key["privateKey"] = pkcs1_key - - return encoder.encode(pkcs8_key) - - -def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key): - """Convert a PKCS1-encoded RSA public key to PKCS8.""" - algorithm = RsaAlgorithmIdentifier() - algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID - - pkcs8_key = 
PublicKeyInfo() - pkcs8_key["algorithm"] = algorithm - pkcs8_key["publicKey"] = univ.BitString.fromOctetString(pkcs1_key) - - return encoder.encode(pkcs8_key) - - -def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key): - """Convert a PKCS8-encoded RSA public key to PKCS1.""" - decoded_values = decoder.decode(pkcs8_key, asn1Spec=PublicKeyInfo()) - - try: - decoded_key = decoded_values[0] - except IndexError: - raise ValueError("Invalid public key encoding.") - - return decoded_key["publicKey"].asOctets() diff --git a/venv/lib/python3.12/site-packages/jose/backends/base.py b/venv/lib/python3.12/site-packages/jose/backends/base.py deleted file mode 100644 index b000a525..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/base.py +++ /dev/null @@ -1,89 +0,0 @@ -from ..utils import base64url_encode, ensure_binary - - -class Key: - """ - A simple interface for implementing JWK keys. - """ - - def __init__(self, key, algorithm): - pass - - def sign(self, msg): - raise NotImplementedError() - - def verify(self, msg, sig): - raise NotImplementedError() - - def public_key(self): - raise NotImplementedError() - - def to_pem(self): - raise NotImplementedError() - - def to_dict(self): - raise NotImplementedError() - - def encrypt(self, plain_text, aad=None): - """ - Encrypt the plain text and generate an auth tag if appropriate - - Args: - plain_text (bytes): Data to encrypt - aad (bytes, optional): Additional Authenticated Data if key's algorithm supports auth mode - - Returns: - (bytes, bytes, bytes): IV, cipher text, and auth tag - """ - raise NotImplementedError() - - def decrypt(self, cipher_text, iv=None, aad=None, tag=None): - """ - Decrypt the cipher text and validate the auth tag if present - Args: - cipher_text (bytes): Cipher text to decrypt - iv (bytes): IV if block mode - aad (bytes): Additional Authenticated Data to verify if auth mode - tag (bytes): Authentication tag if auth mode - - Returns: - bytes: Decrypted value - """ - raise NotImplementedError() - - def wrap_key(self, key_data): - """ - Wrap the plain text key data - - Args: - key_data (bytes): Key data to wrap - - Returns: - bytes: Wrapped key - """ - raise NotImplementedError() - - def unwrap_key(self, wrapped_key): - """ - Unwrap the wrapped key data - - Args: - wrapped_key (bytes): Wrapped key data to unwrap - - Returns: - bytes: Unwrapped key - """ - raise NotImplementedError() - - -class DIRKey(Key): - def __init__(self, key_data, algorithm): - self._key = ensure_binary(key_data) - self._alg = algorithm - - def to_dict(self): - return { - "alg": self._alg, - "kty": "oct", - "k": base64url_encode(self._key), - } diff --git a/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py b/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py deleted file mode 100644 index ec836b4c..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/cryptography_backend.py +++ /dev/null @@ -1,586 +0,0 @@ -import math -import warnings - -from cryptography.exceptions import InvalidSignature, InvalidTag -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes, hmac, serialization -from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa -from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature -from cryptography.hazmat.primitives.ciphers import Cipher, aead, algorithms, modes -from cryptography.hazmat.primitives.keywrap import InvalidUnwrap, aes_key_unwrap, aes_key_wrap -from 
cryptography.hazmat.primitives.padding import PKCS7 -from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key -from cryptography.utils import int_to_bytes -from cryptography.x509 import load_pem_x509_certificate - -from ..constants import ALGORITHMS -from ..exceptions import JWEError, JWKError -from ..utils import ( - base64_to_long, - base64url_decode, - base64url_encode, - ensure_binary, - is_pem_format, - is_ssh_key, - long_to_base64, -) -from . import get_random_bytes -from .base import Key - -_binding = None - - -class CryptographyECKey(Key): - SHA256 = hashes.SHA256 - SHA384 = hashes.SHA384 - SHA512 = hashes.SHA512 - - def __init__(self, key, algorithm, cryptography_backend=default_backend): - if algorithm not in ALGORITHMS.EC: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - - self.hash_alg = { - ALGORITHMS.ES256: self.SHA256, - ALGORITHMS.ES384: self.SHA384, - ALGORITHMS.ES512: self.SHA512, - }.get(algorithm) - self._algorithm = algorithm - - self.cryptography_backend = cryptography_backend - - if hasattr(key, "public_bytes") or hasattr(key, "private_bytes"): - self.prepared_key = key - return - - if hasattr(key, "to_pem"): - # convert to PEM and let cryptography below load it as PEM - key = key.to_pem().decode("utf-8") - - if isinstance(key, dict): - self.prepared_key = self._process_jwk(key) - return - - if isinstance(key, str): - key = key.encode("utf-8") - - if isinstance(key, bytes): - # Attempt to load key. We don't know if it's - # a Public Key or a Private Key, so we try - # the Public Key first. - try: - try: - key = load_pem_public_key(key, self.cryptography_backend()) - except ValueError: - key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) - except Exception as e: - raise JWKError(e) - - self.prepared_key = key - return - - raise JWKError("Unable to parse an ECKey from key: %s" % key) - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "EC": - raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty")) - - if not all(k in jwk_dict for k in ["x", "y", "crv"]): - raise JWKError("Mandatory parameters are missing") - - x = base64_to_long(jwk_dict.get("x")) - y = base64_to_long(jwk_dict.get("y")) - curve = { - "P-256": ec.SECP256R1, - "P-384": ec.SECP384R1, - "P-521": ec.SECP521R1, - }[jwk_dict["crv"]] - - public = ec.EllipticCurvePublicNumbers(x, y, curve()) - - if "d" in jwk_dict: - d = base64_to_long(jwk_dict.get("d")) - private = ec.EllipticCurvePrivateNumbers(d, public) - - return private.private_key(self.cryptography_backend()) - else: - return public.public_key(self.cryptography_backend()) - - def _sig_component_length(self): - """Determine the correct serialization length for an encoded signature component. - - This is the number of bytes required to encode the maximum key value. 
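- For example, a P-256 key gives ceil(256 / 8) = 32-byte components and a 64-byte raw signature, while P-521 gives ceil(521 / 8) = 66-byte components and a 132-byte raw signature.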
- """ - return int(math.ceil(self.prepared_key.key_size / 8.0)) - - def _der_to_raw(self, der_signature): - """Convert signature from DER encoding to RAW encoding.""" - r, s = decode_dss_signature(der_signature) - component_length = self._sig_component_length() - return int_to_bytes(r, component_length) + int_to_bytes(s, component_length) - - def _raw_to_der(self, raw_signature): - """Convert signature from RAW encoding to DER encoding.""" - component_length = self._sig_component_length() - if len(raw_signature) != int(2 * component_length): - raise ValueError("Invalid signature") - - r_bytes = raw_signature[:component_length] - s_bytes = raw_signature[component_length:] - r = int.from_bytes(r_bytes, "big") - s = int.from_bytes(s_bytes, "big") - return encode_dss_signature(r, s) - - def sign(self, msg): - if self.hash_alg.digest_size * 8 > self.prepared_key.curve.key_size: - raise TypeError( - "this curve (%s) is too short " - "for your digest (%d)" % (self.prepared_key.curve.name, 8 * self.hash_alg.digest_size) - ) - signature = self.prepared_key.sign(msg, ec.ECDSA(self.hash_alg())) - return self._der_to_raw(signature) - - def verify(self, msg, sig): - try: - signature = self._raw_to_der(sig) - self.prepared_key.verify(signature, msg, ec.ECDSA(self.hash_alg())) - return True - except Exception: - return False - - def is_public(self): - return hasattr(self.prepared_key, "public_bytes") - - def public_key(self): - if self.is_public(): - return self - return self.__class__(self.prepared_key.public_key(), self._algorithm) - - def to_pem(self): - if self.is_public(): - pem = self.prepared_key.public_bytes( - encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo - ) - return pem - pem = self.prepared_key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption(), - ) - return pem - - def to_dict(self): - if not self.is_public(): - public_key = self.prepared_key.public_key() - else: - public_key = self.prepared_key - - crv = { - "secp256r1": "P-256", - "secp384r1": "P-384", - "secp521r1": "P-521", - }[self.prepared_key.curve.name] - - # Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of - # RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve - # points must be encoded as octed-strings of this length. 
key_size = (self.prepared_key.curve.key_size + 7) // 8 - - data = { - "alg": self._algorithm, - "kty": "EC", - "crv": crv, - "x": long_to_base64(public_key.public_numbers().x, size=key_size).decode("ASCII"), - "y": long_to_base64(public_key.public_numbers().y, size=key_size).decode("ASCII"), - } - - if not self.is_public(): - private_value = self.prepared_key.private_numbers().private_value - data["d"] = long_to_base64(private_value, size=key_size).decode("ASCII") - - return data - - -class CryptographyRSAKey(Key): - SHA256 = hashes.SHA256 - SHA384 = hashes.SHA384 - SHA512 = hashes.SHA512 - - RSA1_5 = padding.PKCS1v15() - RSA_OAEP = padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None) - RSA_OAEP_256 = padding.OAEP(padding.MGF1(hashes.SHA256()), hashes.SHA256(), None) - - def __init__(self, key, algorithm, cryptography_backend=default_backend): - if algorithm not in ALGORITHMS.RSA: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - - self.hash_alg = { - ALGORITHMS.RS256: self.SHA256, - ALGORITHMS.RS384: self.SHA384, - ALGORITHMS.RS512: self.SHA512, - }.get(algorithm) - self._algorithm = algorithm - - self.padding = { - ALGORITHMS.RSA1_5: self.RSA1_5, - ALGORITHMS.RSA_OAEP: self.RSA_OAEP, - ALGORITHMS.RSA_OAEP_256: self.RSA_OAEP_256, - }.get(algorithm) - - self.cryptography_backend = cryptography_backend - - # if it conforms to RSAPublicKey or RSAPrivateKey interface - if (hasattr(key, "public_bytes") and hasattr(key, "public_numbers")) or hasattr(key, "private_bytes"): - self.prepared_key = key - return - - if isinstance(key, dict): - self.prepared_key = self._process_jwk(key) - return - - if isinstance(key, str): - key = key.encode("utf-8") - - if isinstance(key, bytes): - try: - if key.startswith(b"-----BEGIN CERTIFICATE-----"): - self._process_cert(key) - return - - try: - self.prepared_key = load_pem_public_key(key, self.cryptography_backend()) - except ValueError: - self.prepared_key = load_pem_private_key(key, password=None, backend=self.cryptography_backend()) - except Exception as e: - raise JWKError(e) - return - - raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "RSA": - raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) - - e = base64_to_long(jwk_dict.get("e", 256)) - n = base64_to_long(jwk_dict.get("n")) - public = rsa.RSAPublicNumbers(e, n) - - if "d" not in jwk_dict: - return public.public_key(self.cryptography_backend()) - else: - # This is a private key. - d = base64_to_long(jwk_dict.get("d")) - - extra_params = ["p", "q", "dp", "dq", "qi"] - - if any(k in jwk_dict for k in extra_params): - # Precomputed private key parameters are available. - if not all(k in jwk_dict for k in extra_params): - # According to Section 6.3.2 of RFC7518, all of these - # values must be present whenever 'p' is, so if any are - # missing we raise an error. - raise JWKError("Precomputed private key parameters are incomplete.") - - p = base64_to_long(jwk_dict["p"]) - q = base64_to_long(jwk_dict["q"]) - dp = base64_to_long(jwk_dict["dp"]) - dq = base64_to_long(jwk_dict["dq"]) - qi = base64_to_long(jwk_dict["qi"]) - else: - # The precomputed private key parameters are not available, - # so we use cryptography's API to fill them in. 
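- # (rsa_recover_prime_factors recovers p and q from (n, e, d); the calls - # below then compute the CRT values dp = d mod (p - 1), dq = d mod (q - 1) - # and qi = q^-1 mod p that Section 6.3.2 of RFC7518 defines for an RSA JWK.)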
- p, q = rsa.rsa_recover_prime_factors(n, e, d) - dp = rsa.rsa_crt_dmp1(d, p) - dq = rsa.rsa_crt_dmq1(d, q) - qi = rsa.rsa_crt_iqmp(p, q) - - private = rsa.RSAPrivateNumbers(p, q, d, dp, dq, qi, public) - - return private.private_key(self.cryptography_backend()) - - def _process_cert(self, key): - key = load_pem_x509_certificate(key, self.cryptography_backend()) - self.prepared_key = key.public_key() - - def sign(self, msg): - try: - signature = self.prepared_key.sign(msg, padding.PKCS1v15(), self.hash_alg()) - except Exception as e: - raise JWKError(e) - return signature - - def verify(self, msg, sig): - if not self.is_public(): - warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") - - try: - self.public_key().prepared_key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg()) - return True - except InvalidSignature: - return False - - def is_public(self): - return hasattr(self.prepared_key, "public_bytes") - - def public_key(self): - if self.is_public(): - return self - return self.__class__(self.prepared_key.public_key(), self._algorithm) - - def to_pem(self, pem_format="PKCS8"): - if self.is_public(): - if pem_format == "PKCS8": - fmt = serialization.PublicFormat.SubjectPublicKeyInfo - elif pem_format == "PKCS1": - fmt = serialization.PublicFormat.PKCS1 - else: - raise ValueError("Invalid format specified: %r" % pem_format) - pem = self.prepared_key.public_bytes(encoding=serialization.Encoding.PEM, format=fmt) - return pem - - if pem_format == "PKCS8": - fmt = serialization.PrivateFormat.PKCS8 - elif pem_format == "PKCS1": - fmt = serialization.PrivateFormat.TraditionalOpenSSL - else: - raise ValueError("Invalid format specified: %r" % pem_format) - - return self.prepared_key.private_bytes( - encoding=serialization.Encoding.PEM, format=fmt, encryption_algorithm=serialization.NoEncryption() - ) - - def to_dict(self): - if not self.is_public(): - public_key = self.prepared_key.public_key() - else: - public_key = self.prepared_key - - data = { - "alg": self._algorithm, - "kty": "RSA", - "n": long_to_base64(public_key.public_numbers().n).decode("ASCII"), - "e": long_to_base64(public_key.public_numbers().e).decode("ASCII"), - } - - if not self.is_public(): - data.update( - { - "d": long_to_base64(self.prepared_key.private_numbers().d).decode("ASCII"), - "p": long_to_base64(self.prepared_key.private_numbers().p).decode("ASCII"), - "q": long_to_base64(self.prepared_key.private_numbers().q).decode("ASCII"), - "dp": long_to_base64(self.prepared_key.private_numbers().dmp1).decode("ASCII"), - "dq": long_to_base64(self.prepared_key.private_numbers().dmq1).decode("ASCII"), - "qi": long_to_base64(self.prepared_key.private_numbers().iqmp).decode("ASCII"), - } - ) - - return data - - def wrap_key(self, key_data): - try: - wrapped_key = self.prepared_key.encrypt(key_data, self.padding) - except Exception as e: - raise JWEError(e) - - return wrapped_key - - def unwrap_key(self, wrapped_key): - try: - unwrapped_key = self.prepared_key.decrypt(wrapped_key, self.padding) - return unwrapped_key - except Exception as e: - raise JWEError(e) - - -class CryptographyAESKey(Key): - KEY_128 = (ALGORITHMS.A128GCM, ALGORITHMS.A128GCMKW, ALGORITHMS.A128KW, ALGORITHMS.A128CBC) - KEY_192 = (ALGORITHMS.A192GCM, ALGORITHMS.A192GCMKW, ALGORITHMS.A192KW, ALGORITHMS.A192CBC) - KEY_256 = ( - ALGORITHMS.A256GCM, - ALGORITHMS.A256GCMKW, - ALGORITHMS.A256KW, - ALGORITHMS.A128CBC_HS256, - ALGORITHMS.A256CBC, - ) - KEY_384 = (ALGORITHMS.A192CBC_HS384,) - KEY_512 = 
(ALGORITHMS.A256CBC_HS512,) - - AES_KW_ALGS = (ALGORITHMS.A128KW, ALGORITHMS.A192KW, ALGORITHMS.A256KW) - - MODES = { - ALGORITHMS.A128GCM: modes.GCM, - ALGORITHMS.A192GCM: modes.GCM, - ALGORITHMS.A256GCM: modes.GCM, - ALGORITHMS.A128CBC_HS256: modes.CBC, - ALGORITHMS.A192CBC_HS384: modes.CBC, - ALGORITHMS.A256CBC_HS512: modes.CBC, - ALGORITHMS.A128CBC: modes.CBC, - ALGORITHMS.A192CBC: modes.CBC, - ALGORITHMS.A256CBC: modes.CBC, - ALGORITHMS.A128GCMKW: modes.GCM, - ALGORITHMS.A192GCMKW: modes.GCM, - ALGORITHMS.A256GCMKW: modes.GCM, - ALGORITHMS.A128KW: None, - ALGORITHMS.A192KW: None, - ALGORITHMS.A256KW: None, - } - - IV_BYTE_LENGTH_MODE_MAP = {"CBC": algorithms.AES.block_size // 8, "GCM": 96 // 8} - - def __init__(self, key, algorithm): - if algorithm not in ALGORITHMS.AES: - raise JWKError("%s is not a valid AES algorithm" % algorithm) - if algorithm not in ALGORITHMS.SUPPORTED.union(ALGORITHMS.AES_PSEUDO): - raise JWKError("%s is not a supported algorithm" % algorithm) - - self._algorithm = algorithm - self._mode = self.MODES.get(self._algorithm) - - if algorithm in self.KEY_128 and len(key) != 16: - raise JWKError(f"Key must be 128 bit for alg {algorithm}") - elif algorithm in self.KEY_192 and len(key) != 24: - raise JWKError(f"Key must be 192 bit for alg {algorithm}") - elif algorithm in self.KEY_256 and len(key) != 32: - raise JWKError(f"Key must be 256 bit for alg {algorithm}") - elif algorithm in self.KEY_384 and len(key) != 48: - raise JWKError(f"Key must be 384 bit for alg {algorithm}") - elif algorithm in self.KEY_512 and len(key) != 64: - raise JWKError(f"Key must be 512 bit for alg {algorithm}") - - self._key = key - - def to_dict(self): - data = {"alg": self._algorithm, "kty": "oct", "k": base64url_encode(self._key)} - return data - - def encrypt(self, plain_text, aad=None): - plain_text = ensure_binary(plain_text) - try: - iv_byte_length = self.IV_BYTE_LENGTH_MODE_MAP.get(self._mode.name, algorithms.AES.block_size) - iv = get_random_bytes(iv_byte_length) - mode = self._mode(iv) - if mode.name == "GCM": - cipher = aead.AESGCM(self._key) - cipher_text_and_tag = cipher.encrypt(iv, plain_text, aad) - cipher_text = cipher_text_and_tag[: len(cipher_text_and_tag) - 16] - auth_tag = cipher_text_and_tag[-16:] - else: - cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) - encryptor = cipher.encryptor() - padder = PKCS7(algorithms.AES.block_size).padder() - padded_data = padder.update(plain_text) - padded_data += padder.finalize() - cipher_text = encryptor.update(padded_data) + encryptor.finalize() - auth_tag = None - return iv, cipher_text, auth_tag - except Exception as e: - raise JWEError(e) - - def decrypt(self, cipher_text, iv=None, aad=None, tag=None): - cipher_text = ensure_binary(cipher_text) - try: - iv = ensure_binary(iv) - mode = self._mode(iv) - if mode.name == "GCM": - if tag is None: - raise ValueError("tag cannot be None") - cipher = aead.AESGCM(self._key) - cipher_text_and_tag = cipher_text + tag - try: - plain_text = cipher.decrypt(iv, cipher_text_and_tag, aad) - except InvalidTag: - raise JWEError("Invalid JWE Auth Tag") - else: - cipher = Cipher(algorithms.AES(self._key), mode, backend=default_backend()) - decryptor = cipher.decryptor() - padded_plain_text = decryptor.update(cipher_text) - padded_plain_text += decryptor.finalize() - unpadder = PKCS7(algorithms.AES.block_size).unpadder() - plain_text = unpadder.update(padded_plain_text) - plain_text += unpadder.finalize() - - return plain_text - except Exception as e: - raise JWEError(e) - - 
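- # A minimal usage sketch (an illustration added here, not part of the - # original module): a 256-bit AES-GCM round trip through this class, with - # random placeholder key bytes. - # - # from os import urandom - # from jose.constants import ALGORITHMS - # key = CryptographyAESKey(urandom(32), ALGORITHMS.A256GCM) - # iv, cipher_text, tag = key.encrypt(b"secret", aad=b"header") - # assert key.decrypt(cipher_text, iv=iv, aad=b"header", tag=tag) == b"secret"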
def wrap_key(self, key_data): - key_data = ensure_binary(key_data) - cipher_text = aes_key_wrap(self._key, key_data, default_backend()) - return cipher_text # just the wrapped key; AES key wrap produces no separate IV or auth tag - - def unwrap_key(self, wrapped_key): - wrapped_key = ensure_binary(wrapped_key) - try: - plain_text = aes_key_unwrap(self._key, wrapped_key, default_backend()) - except InvalidUnwrap as cause: - raise JWEError(cause) - return plain_text - - -class CryptographyHMACKey(Key): - """ - Performs signing and verification operations using HMAC - and the specified hash function. - """ - - ALG_MAP = {ALGORITHMS.HS256: hashes.SHA256(), ALGORITHMS.HS384: hashes.SHA384(), ALGORITHMS.HS512: hashes.SHA512()} - - def __init__(self, key, algorithm): - if algorithm not in ALGORITHMS.HMAC: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - self._algorithm = algorithm - self._hash_alg = self.ALG_MAP.get(algorithm) - - if isinstance(key, dict): - self.prepared_key = self._process_jwk(key) - return - - if not isinstance(key, str) and not isinstance(key, bytes): - raise JWKError("Expecting a string- or bytes-formatted key.") - - if isinstance(key, str): - key = key.encode("utf-8") - - if is_pem_format(key) or is_ssh_key(key): - raise JWKError( - "The specified key is an asymmetric key or x509 certificate and" - " should not be used as an HMAC secret." - ) - - self.prepared_key = key - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "oct": - raise JWKError("Incorrect key type. Expected: 'oct', Received: %s" % jwk_dict.get("kty")) - - k = jwk_dict.get("k") - k = k.encode("utf-8") - k = bytes(k) - k = base64url_decode(k) - - return k - - def to_dict(self): - return { - "alg": self._algorithm, - "kty": "oct", - "k": base64url_encode(self.prepared_key).decode("ASCII"), - } - - def sign(self, msg): - msg = ensure_binary(msg) - h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend()) - h.update(msg) - signature = h.finalize() - return signature - - def verify(self, msg, sig): - msg = ensure_binary(msg) - sig = ensure_binary(sig) - h = hmac.HMAC(self.prepared_key, self._hash_alg, backend=default_backend()) - h.update(msg) - try: - h.verify(sig) - verified = True - except InvalidSignature: - verified = False - return verified diff --git a/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py b/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py deleted file mode 100644 index 756c7ea8..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/ecdsa_backend.py +++ /dev/null @@ -1,150 +0,0 @@ -import hashlib - -import ecdsa - -from jose.backends.base import Key -from jose.constants import ALGORITHMS -from jose.exceptions import JWKError -from jose.utils import base64_to_long, long_to_base64 - - -class ECDSAECKey(Key): - """ - Performs signing and verification operations using - ECDSA and the specified hash function - - This class requires the ecdsa package to be installed. 
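- (The PyPI package is named simply "ecdsa"; as the backends/__init__.py hunk above shows, python-jose falls back to this class only when the cryptography-based ECKey cannot be imported.)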
- - This is based off of the implementation in PyJWT 0.3.2 - """ - - SHA256 = hashlib.sha256 - SHA384 = hashlib.sha384 - SHA512 = hashlib.sha512 - - CURVE_MAP = { - SHA256: ecdsa.curves.NIST256p, - SHA384: ecdsa.curves.NIST384p, - SHA512: ecdsa.curves.NIST521p, - } - CURVE_NAMES = ( - (ecdsa.curves.NIST256p, "P-256"), - (ecdsa.curves.NIST384p, "P-384"), - (ecdsa.curves.NIST521p, "P-521"), - ) - - def __init__(self, key, algorithm): - if algorithm not in ALGORITHMS.EC: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - - self.hash_alg = { - ALGORITHMS.ES256: self.SHA256, - ALGORITHMS.ES384: self.SHA384, - ALGORITHMS.ES512: self.SHA512, - }.get(algorithm) - self._algorithm = algorithm - - self.curve = self.CURVE_MAP.get(self.hash_alg) - - if isinstance(key, (ecdsa.SigningKey, ecdsa.VerifyingKey)): - self.prepared_key = key - return - - if isinstance(key, dict): - self.prepared_key = self._process_jwk(key) - return - - if isinstance(key, str): - key = key.encode("utf-8") - - if isinstance(key, bytes): - # Attempt to load key. We don't know if it's - # a Signing Key or a Verifying Key, so we try - # the Verifying Key first. - try: - key = ecdsa.VerifyingKey.from_pem(key) - except ecdsa.der.UnexpectedDER: - key = ecdsa.SigningKey.from_pem(key) - except Exception as e: - raise JWKError(e) - - self.prepared_key = key - return - - raise JWKError("Unable to parse an ECKey from key: %s" % key) - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "EC": - raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get("kty")) - - if not all(k in jwk_dict for k in ["x", "y", "crv"]): - raise JWKError("Mandatory parameters are missing") - - if "d" in jwk_dict: - # We are dealing with a private key; the secret exponent is enough - # to create an ecdsa key. - d = base64_to_long(jwk_dict.get("d")) - return ecdsa.keys.SigningKey.from_secret_exponent(d, self.curve) - else: - x = base64_to_long(jwk_dict.get("x")) - y = base64_to_long(jwk_dict.get("y")) - - if not ecdsa.ecdsa.point_is_valid(self.curve.generator, x, y): - raise JWKError(f"Point: {x}, {y} is not a valid point") - - point = ecdsa.ellipticcurve.Point(self.curve.curve, x, y, self.curve.order) - return ecdsa.keys.VerifyingKey.from_public_point(point, self.curve) - - def sign(self, msg): - return self.prepared_key.sign( - msg, hashfunc=self.hash_alg, sigencode=ecdsa.util.sigencode_string, allow_truncate=False - ) - - def verify(self, msg, sig): - try: - return self.prepared_key.verify( - sig, msg, hashfunc=self.hash_alg, sigdecode=ecdsa.util.sigdecode_string, allow_truncate=False - ) - except Exception: - return False - - def is_public(self): - return isinstance(self.prepared_key, ecdsa.VerifyingKey) - - def public_key(self): - if self.is_public(): - return self - return self.__class__(self.prepared_key.get_verifying_key(), self._algorithm) - - def to_pem(self): - return self.prepared_key.to_pem() - - def to_dict(self): - if not self.is_public(): - public_key = self.prepared_key.get_verifying_key() - else: - public_key = self.prepared_key - crv = None - for key, value in self.CURVE_NAMES: - if key == self.prepared_key.curve: - crv = value - if not crv: - raise KeyError(f"Can't match {self.prepared_key.curve}") - - # Calculate the key size in bytes. Sections 6.2.1.2 and 6.2.1.3 of - # RFC7518 prescribe that the 'x', 'y' and 'd' parameters of the curve - # points must be encoded as octet strings of this length.
- key_size = self.prepared_key.curve.baselen - - data = { - "alg": self._algorithm, - "kty": "EC", - "crv": crv, - "x": long_to_base64(public_key.pubkey.point.x(), size=key_size).decode("ASCII"), - "y": long_to_base64(public_key.pubkey.point.y(), size=key_size).decode("ASCII"), - } - - if not self.is_public(): - data["d"] = long_to_base64(self.prepared_key.privkey.secret_multiplier, size=key_size).decode("ASCII") - - return data diff --git a/venv/lib/python3.12/site-packages/jose/backends/native.py b/venv/lib/python3.12/site-packages/jose/backends/native.py deleted file mode 100644 index 8cc77dab..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/native.py +++ /dev/null @@ -1,69 +0,0 @@ -import hashlib -import hmac -import os - -from jose.backends.base import Key -from jose.constants import ALGORITHMS -from jose.exceptions import JWKError -from jose.utils import base64url_decode, base64url_encode, is_pem_format, is_ssh_key - - -def get_random_bytes(num_bytes): - return bytes(os.urandom(num_bytes)) - - -class HMACKey(Key): - """ - Performs signing and verification operations using HMAC - and the specified hash function. - """ - - HASHES = {ALGORITHMS.HS256: hashlib.sha256, ALGORITHMS.HS384: hashlib.sha384, ALGORITHMS.HS512: hashlib.sha512} - - def __init__(self, key, algorithm): - if algorithm not in ALGORITHMS.HMAC: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - self._algorithm = algorithm - self._hash_alg = self.HASHES.get(algorithm) - - if isinstance(key, dict): - self.prepared_key = self._process_jwk(key) - return - - if not isinstance(key, str) and not isinstance(key, bytes): - raise JWKError("Expecting a string- or bytes-formatted key.") - - if isinstance(key, str): - key = key.encode("utf-8") - - if is_pem_format(key) or is_ssh_key(key): - raise JWKError( - "The specified key is an asymmetric key or x509 certificate and" - " should not be used as an HMAC secret." - ) - - self.prepared_key = key - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "oct": - raise JWKError("Incorrect key type. 
Expected: 'oct', Received: %s" % jwk_dict.get("kty")) - - k = jwk_dict.get("k") - k = k.encode("utf-8") - k = bytes(k) - k = base64url_decode(k) - - return k - - def sign(self, msg): - return hmac.new(self.prepared_key, msg, self._hash_alg).digest() - - def verify(self, msg, sig): - return hmac.compare_digest(sig, self.sign(msg)) - - def to_dict(self): - return { - "alg": self._algorithm, - "kty": "oct", - "k": base64url_encode(self.prepared_key).decode("ASCII"), - } diff --git a/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py b/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py deleted file mode 100644 index 8139d698..00000000 --- a/venv/lib/python3.12/site-packages/jose/backends/rsa_backend.py +++ /dev/null @@ -1,283 +0,0 @@ -import binascii -import warnings - -import rsa as pyrsa -import rsa.pem as pyrsa_pem -from pyasn1.error import PyAsn1Error -from rsa import DecryptionError - -from jose.backends._asn1 import ( - rsa_private_key_pkcs1_to_pkcs8, - rsa_private_key_pkcs8_to_pkcs1, - rsa_public_key_pkcs1_to_pkcs8, -) -from jose.backends.base import Key -from jose.constants import ALGORITHMS -from jose.exceptions import JWEError, JWKError -from jose.utils import base64_to_long, long_to_base64 - -ALGORITHMS.SUPPORTED.remove(ALGORITHMS.RSA_OAEP) # RSA OAEP not supported - -LEGACY_INVALID_PKCS8_RSA_HEADER = binascii.unhexlify( - "30" # sequence - "8204BD" # DER-encoded sequence contents length of 1213 bytes -- INCORRECT STATIC LENGTH - "020100" # integer: 0 -- Version - "30" # sequence - "0D" # DER-encoded sequence contents length of 13 bytes -- PrivateKeyAlgorithmIdentifier - "06092A864886F70D010101" # OID -- rsaEncryption - "0500" # NULL -- parameters -) -ASN1_SEQUENCE_ID = binascii.unhexlify("30") -RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1" - -# Functions gcd and rsa_recover_prime_factors were copied from cryptography 1.9 -# to enable pure python rsa module to be in compliance with section 6.3.1 of RFC7518 -# which requires only private exponent (d) for private key. - - -def _gcd(a, b): - """Calculate the Greatest Common Divisor of a and b. - - Unless b==0, the result will have the same sign as b (so that when - b is divided by it, the result comes out positive). - """ - while b: - a, b = b, (a % b) - return a - - -# Controls the number of iterations rsa_recover_prime_factors will perform -# to obtain the prime factors. Each iteration increments by 2 so the actual -# maximum attempts is half this number. -_MAX_RECOVERY_ATTEMPTS = 1000 - - -def _rsa_recover_prime_factors(n, e, d): - """ - Compute factors p and q from the private exponent d. We assume that n has - no more than two factors. This function is adapted from code in PyCrypto. - """ - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d * e - 1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t % 2 == 0: - t = t // 2 - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. 
Rabin, 1979 - spotted = False - a = 2 - while not spotted and a < _MAX_RECOVERY_ATTEMPTS: - k = t - # Cycle through all values a^{t*2^i}=a^k - while k < ktot: - cand = pow(a, k, n) - # Check if a^k is a non-trivial root of unity (mod n) - if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: - # We have found a number such that (cand-1)(cand+1)=0 (mod n). - # Either of the terms divides n. - p = _gcd(cand + 1, n) - spotted = True - break - k *= 2 - # This value was not any good... let's try another! - a += 2 - if not spotted: - raise ValueError("Unable to compute factors p and q from exponent d.") - # Found ! - q, r = divmod(n, p) - assert r == 0 - p, q = sorted((p, q), reverse=True) - return (p, q) - - -def pem_to_spki(pem, fmt="PKCS8"): - key = RSAKey(pem, ALGORITHMS.RS256) - return key.to_pem(fmt) - - -def _legacy_private_key_pkcs8_to_pkcs1(pkcs8_key): - """Legacy RSA private key PKCS8-to-PKCS1 conversion. - - .. warning:: - - This is incorrect parsing and only works because the legacy PKCS1-to-PKCS8 - encoding was also incorrect. - """ - # Only allow this processing if the prefix matches - # AND the following byte indicates an ASN1 sequence, - # as we would expect with the legacy encoding. - if not pkcs8_key.startswith(LEGACY_INVALID_PKCS8_RSA_HEADER + ASN1_SEQUENCE_ID): - raise ValueError("Invalid private key encoding") - - return pkcs8_key[len(LEGACY_INVALID_PKCS8_RSA_HEADER) :] - - -class RSAKey(Key): - SHA256 = "SHA-256" - SHA384 = "SHA-384" - SHA512 = "SHA-512" - - def __init__(self, key, algorithm): - if algorithm not in ALGORITHMS.RSA: - raise JWKError("hash_alg: %s is not a valid hash algorithm" % algorithm) - - if algorithm in ALGORITHMS.RSA_KW and algorithm != ALGORITHMS.RSA1_5: - raise JWKError("alg: %s is not supported by the RSA backend" % algorithm) - - self.hash_alg = { - ALGORITHMS.RS256: self.SHA256, - ALGORITHMS.RS384: self.SHA384, - ALGORITHMS.RS512: self.SHA512, - }.get(algorithm) - self._algorithm = algorithm - - if isinstance(key, dict): - self._prepared_key = self._process_jwk(key) - return - - if isinstance(key, (pyrsa.PublicKey, pyrsa.PrivateKey)): - self._prepared_key = key - return - - if isinstance(key, str): - key = key.encode("utf-8") - - if isinstance(key, bytes): - try: - self._prepared_key = pyrsa.PublicKey.load_pkcs1(key) - except ValueError: - try: - self._prepared_key = pyrsa.PublicKey.load_pkcs1_openssl_pem(key) - except ValueError: - try: - self._prepared_key = pyrsa.PrivateKey.load_pkcs1(key) - except ValueError: - try: - der = pyrsa_pem.load_pem(key, b"PRIVATE KEY") - try: - pkcs1_key = rsa_private_key_pkcs8_to_pkcs1(der) - except PyAsn1Error: - # If the key was encoded using the old, invalid, - # encoding then pyasn1 will throw an error attempting - # to parse the key. - pkcs1_key = _legacy_private_key_pkcs8_to_pkcs1(der) - self._prepared_key = pyrsa.PrivateKey.load_pkcs1(pkcs1_key, format="DER") - except ValueError as e: - raise JWKError(e) - return - raise JWKError("Unable to parse an RSA_JWK from key: %s" % key) - - def _process_jwk(self, jwk_dict): - if not jwk_dict.get("kty") == "RSA": - raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get("kty")) - - e = base64_to_long(jwk_dict.get("e")) - n = base64_to_long(jwk_dict.get("n")) - - if "d" not in jwk_dict: - return pyrsa.PublicKey(e=e, n=n) - else: - d = base64_to_long(jwk_dict.get("d")) - extra_params = ["p", "q", "dp", "dq", "qi"] - - if any(k in jwk_dict for k in extra_params): - # Precomputed private key parameters are available. 
- if not all(k in jwk_dict for k in extra_params): - # These values must be present when 'p' is according to - # Section 6.3.2 of RFC7518, so if they are not we raise - # an error. - raise JWKError("Precomputed private key parameters are incomplete.") - - p = base64_to_long(jwk_dict["p"]) - q = base64_to_long(jwk_dict["q"]) - return pyrsa.PrivateKey(e=e, n=n, d=d, p=p, q=q) - else: - p, q = _rsa_recover_prime_factors(n, e, d) - return pyrsa.PrivateKey(n=n, e=e, d=d, p=p, q=q) - - def sign(self, msg): - return pyrsa.sign(msg, self._prepared_key, self.hash_alg) - - def verify(self, msg, sig): - if not self.is_public(): - warnings.warn("Attempting to verify a message with a private key. " "This is not recommended.") - try: - pyrsa.verify(msg, sig, self._prepared_key) - return True - except pyrsa.pkcs1.VerificationError: - return False - - def is_public(self): - return isinstance(self._prepared_key, pyrsa.PublicKey) - - def public_key(self): - if isinstance(self._prepared_key, pyrsa.PublicKey): - return self - return self.__class__(pyrsa.PublicKey(n=self._prepared_key.n, e=self._prepared_key.e), self._algorithm) - - def to_pem(self, pem_format="PKCS8"): - if isinstance(self._prepared_key, pyrsa.PrivateKey): - der = self._prepared_key.save_pkcs1(format="DER") - if pem_format == "PKCS8": - pkcs8_der = rsa_private_key_pkcs1_to_pkcs8(der) - pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PRIVATE KEY") - elif pem_format == "PKCS1": - pem = pyrsa_pem.save_pem(der, pem_marker="RSA PRIVATE KEY") - else: - raise ValueError(f"Invalid pem format specified: {pem_format!r}") - else: - if pem_format == "PKCS8": - pkcs1_der = self._prepared_key.save_pkcs1(format="DER") - pkcs8_der = rsa_public_key_pkcs1_to_pkcs8(pkcs1_der) - pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker="PUBLIC KEY") - elif pem_format == "PKCS1": - der = self._prepared_key.save_pkcs1(format="DER") - pem = pyrsa_pem.save_pem(der, pem_marker="RSA PUBLIC KEY") - else: - raise ValueError(f"Invalid pem format specified: {pem_format!r}") - return pem - - def to_dict(self): - if not self.is_public(): - public_key = self.public_key()._prepared_key - else: - public_key = self._prepared_key - - data = { - "alg": self._algorithm, - "kty": "RSA", - "n": long_to_base64(public_key.n).decode("ASCII"), - "e": long_to_base64(public_key.e).decode("ASCII"), - } - - if not self.is_public(): - data.update( - { - "d": long_to_base64(self._prepared_key.d).decode("ASCII"), - "p": long_to_base64(self._prepared_key.p).decode("ASCII"), - "q": long_to_base64(self._prepared_key.q).decode("ASCII"), - "dp": long_to_base64(self._prepared_key.exp1).decode("ASCII"), - "dq": long_to_base64(self._prepared_key.exp2).decode("ASCII"), - "qi": long_to_base64(self._prepared_key.coef).decode("ASCII"), - } - ) - - return data - - def wrap_key(self, key_data): - if not self.is_public(): - warnings.warn("Attempting to encrypt a message with a private key." 
" This is not recommended.") - wrapped_key = pyrsa.encrypt(key_data, self._prepared_key) - return wrapped_key - - def unwrap_key(self, wrapped_key): - try: - unwrapped_key = pyrsa.decrypt(wrapped_key, self._prepared_key) - except DecryptionError as e: - raise JWEError(e) - return unwrapped_key diff --git a/venv/lib/python3.12/site-packages/jose/constants.py b/venv/lib/python3.12/site-packages/jose/constants.py deleted file mode 100644 index 58787d46..00000000 --- a/venv/lib/python3.12/site-packages/jose/constants.py +++ /dev/null @@ -1,100 +0,0 @@ -import hashlib - - -class Algorithms: - # DS Algorithms - NONE = "none" - HS256 = "HS256" - HS384 = "HS384" - HS512 = "HS512" - RS256 = "RS256" - RS384 = "RS384" - RS512 = "RS512" - ES256 = "ES256" - ES384 = "ES384" - ES512 = "ES512" - - # Content Encryption Algorithms - A128CBC_HS256 = "A128CBC-HS256" - A192CBC_HS384 = "A192CBC-HS384" - A256CBC_HS512 = "A256CBC-HS512" - A128GCM = "A128GCM" - A192GCM = "A192GCM" - A256GCM = "A256GCM" - - # Pseudo algorithm for encryption - A128CBC = "A128CBC" - A192CBC = "A192CBC" - A256CBC = "A256CBC" - - # CEK Encryption Algorithms - DIR = "dir" - RSA1_5 = "RSA1_5" - RSA_OAEP = "RSA-OAEP" - RSA_OAEP_256 = "RSA-OAEP-256" - A128KW = "A128KW" - A192KW = "A192KW" - A256KW = "A256KW" - ECDH_ES = "ECDH-ES" - ECDH_ES_A128KW = "ECDH-ES+A128KW" - ECDH_ES_A192KW = "ECDH-ES+A192KW" - ECDH_ES_A256KW = "ECDH-ES+A256KW" - A128GCMKW = "A128GCMKW" - A192GCMKW = "A192GCMKW" - A256GCMKW = "A256GCMKW" - PBES2_HS256_A128KW = "PBES2-HS256+A128KW" - PBES2_HS384_A192KW = "PBES2-HS384+A192KW" - PBES2_HS512_A256KW = "PBES2-HS512+A256KW" - - # Compression Algorithms - DEF = "DEF" - - HMAC = {HS256, HS384, HS512} - RSA_DS = {RS256, RS384, RS512} - RSA_KW = {RSA1_5, RSA_OAEP, RSA_OAEP_256} - RSA = RSA_DS.union(RSA_KW) - EC_DS = {ES256, ES384, ES512} - EC_KW = {ECDH_ES, ECDH_ES_A128KW, ECDH_ES_A192KW, ECDH_ES_A256KW} - EC = EC_DS.union(EC_KW) - AES_PSEUDO = {A128CBC, A192CBC, A256CBC, A128GCM, A192GCM, A256GCM} - AES_JWE_ENC = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512, A128GCM, A192GCM, A256GCM} - AES_ENC = AES_JWE_ENC.union(AES_PSEUDO) - AES_KW = {A128KW, A192KW, A256KW} - AEC_GCM_KW = {A128GCMKW, A192GCMKW, A256GCMKW} - AES = AES_ENC.union(AES_KW) - PBES2_KW = {PBES2_HS256_A128KW, PBES2_HS384_A192KW, PBES2_HS512_A256KW} - - HMAC_AUTH_TAG = {A128CBC_HS256, A192CBC_HS384, A256CBC_HS512} - GCM = {A128GCM, A192GCM, A256GCM} - - SUPPORTED = HMAC.union(RSA_DS).union(EC_DS).union([DIR]).union(AES_JWE_ENC).union(RSA_KW).union(AES_KW) - - ALL = SUPPORTED.union([NONE]).union(AEC_GCM_KW).union(EC_KW).union(PBES2_KW) - - HASHES = { - HS256: hashlib.sha256, - HS384: hashlib.sha384, - HS512: hashlib.sha512, - RS256: hashlib.sha256, - RS384: hashlib.sha384, - RS512: hashlib.sha512, - ES256: hashlib.sha256, - ES384: hashlib.sha384, - ES512: hashlib.sha512, - } - - KEYS = {} - - -ALGORITHMS = Algorithms() - - -class Zips: - DEF = "DEF" - NONE = None - SUPPORTED = {DEF, NONE} - - -ZIPS = Zips() - -JWE_SIZE_LIMIT = 250 * 1024 diff --git a/venv/lib/python3.12/site-packages/jose/exceptions.py b/venv/lib/python3.12/site-packages/jose/exceptions.py deleted file mode 100644 index e8edc3b6..00000000 --- a/venv/lib/python3.12/site-packages/jose/exceptions.py +++ /dev/null @@ -1,59 +0,0 @@ -class JOSEError(Exception): - pass - - -class JWSError(JOSEError): - pass - - -class JWSSignatureError(JWSError): - pass - - -class JWSAlgorithmError(JWSError): - pass - - -class JWTError(JOSEError): - pass - - -class JWTClaimsError(JWTError): - pass - - -class 
ExpiredSignatureError(JWTError): - pass - - -class JWKError(JOSEError): - pass - - -class JWEError(JOSEError): - """Base error for all JWE errors""" - - pass - - -class JWEParseError(JWEError): - """Could not parse the JWE string provided""" - - pass - - -class JWEInvalidAuth(JWEError): - """ - The authentication tag did not match the protected sections of the - JWE string provided - """ - - pass - - -class JWEAlgorithmUnsupportedError(JWEError): - """ - The JWE algorithm is not supported by the backend - """ - - pass diff --git a/venv/lib/python3.12/site-packages/jose/jwe.py b/venv/lib/python3.12/site-packages/jose/jwe.py deleted file mode 100644 index 09e5c329..00000000 --- a/venv/lib/python3.12/site-packages/jose/jwe.py +++ /dev/null @@ -1,619 +0,0 @@ -import binascii -import json -import zlib -from collections.abc import Mapping -from struct import pack - -from . import jwk -from .backends import get_random_bytes -from .constants import ALGORITHMS, JWE_SIZE_LIMIT, ZIPS -from .exceptions import JWEError, JWEParseError -from .utils import base64url_decode, base64url_encode, ensure_binary - - -def encrypt(plaintext, key, encryption=ALGORITHMS.A256GCM, algorithm=ALGORITHMS.DIR, zip=None, cty=None, kid=None): - """Encrypts plaintext and returns a JWE compact serialization string. - - Args: - plaintext (bytes): A bytes object to encrypt - key (str or dict): The key(s) to use for encrypting the content. Can be - individual JWK or JWK set. - encryption (str, optional): The content encryption algorithm used to - perform authenticated encryption on the plaintext to produce the - ciphertext and the Authentication Tag. Defaults to A256GCM. - algorithm (str, optional): The cryptographic algorithm used - to encrypt or determine the value of the CEK. Defaults to dir. - zip (str, optional): The compression algorithm applied to the - plaintext before encryption. Defaults to None. - cty (str, optional): The media type for the secured content. - See http://www.iana.org/assignments/media-types/media-types.xhtml - kid (str, optional): Key ID for the provided key - - Returns: - bytes: The string representation of the header, encrypted key, - initialization vector, ciphertext, and authentication tag. - - Raises: - JWEError: If there is an error encrypting the token. - - Examples: - >>> from jose import jwe - >>> jwe.encrypt('Hello, World!', 'asecret128bitkey', algorithm='dir', encryption='A128GCM') - 'eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4R0NNIn0..McILMB3dYsNJSuhcDzQshA.OfX9H_mcUpHDeRM4IA.CcnTWqaqxNsjT4eCaUABSg' - - """ - plaintext = ensure_binary(plaintext) # Make sure it's bytes - if algorithm not in ALGORITHMS.SUPPORTED: - raise JWEError("Algorithm %s not supported." % algorithm) - if encryption not in ALGORITHMS.SUPPORTED: - raise JWEError("Algorithm %s not supported." % encryption) - key = jwk.construct(key, algorithm) - encoded_header = _encoded_header(algorithm, encryption, zip, cty, kid) - - plaintext = _compress(zip, plaintext) - enc_cek, iv, cipher_text, auth_tag = _encrypt_and_auth(key, algorithm, encryption, zip, plaintext, encoded_header) - - jwe_string = _jwe_compact_serialize(encoded_header, enc_cek, iv, cipher_text, auth_tag) - return jwe_string - - -def decrypt(jwe_str, key): - """Decrypts a JWE compact serialized string and returns the plaintext. - - Args: - jwe_str (str): A JWE to be decrypted. - key (str or dict): A key to attempt to decrypt the payload with. Can be - individual JWK or JWK set. - - Returns: - bytes: The plaintext bytes, assuming the authentication tag is valid.
- - Raises: - JWEError: If there is an exception verifying the token. - - Examples: - >>> from jose import jwe - >>> jwe.decrypt(jwe_string, 'asecret128bitkey') - 'Hello, World!' - """ - - # Limit the token size - if the data is compressed then decompressing the - # data could lead to large memory usage. This addresses - # CVE-2024-33664. Also see _decompress() - if len(jwe_str) > JWE_SIZE_LIMIT: - raise JWEError(f"JWE string {len(jwe_str)} bytes exceeds {JWE_SIZE_LIMIT} bytes") - - header, encoded_header, encrypted_key, iv, cipher_text, auth_tag = _jwe_compact_deserialize(jwe_str) - - # Verify that the implementation understands and can process all - # fields that it is required to support, whether required by this - # specification, by the algorithms being used, or by the "crit" - # Header Parameter value, and that the values of those parameters - # are also understood and supported. - - try: - # Determine the Key Management Mode employed by the algorithm - # specified by the "alg" (algorithm) Header Parameter. - alg = header["alg"] - enc = header["enc"] - if alg not in ALGORITHMS.SUPPORTED: - raise JWEError("Algorithm %s not supported." % alg) - if enc not in ALGORITHMS.SUPPORTED: - raise JWEError("Algorithm %s not supported." % enc) - - except KeyError: - raise JWEParseError("alg and enc headers are required!") - - # Verify that the JWE uses a key known to the recipient. - key = jwk.construct(key, alg) - - # When Direct Key Agreement or Key Agreement with Key Wrapping are - # employed, use the key agreement algorithm to compute the value - # of the agreed upon key. When Direct Key Agreement is employed, - # let the CEK be the agreed upon key. When Key Agreement with Key - # Wrapping is employed, the agreed upon key will be used to - # decrypt the JWE Encrypted Key. - # - # When Key Wrapping, Key Encryption, or Key Agreement with Key - # Wrapping are employed, decrypt the JWE Encrypted Key to produce - # the CEK. The CEK MUST have a length equal to that required for - # the content encryption algorithm. Note that when there are - # multiple recipients, each recipient will only be able to decrypt - # JWE Encrypted Key values that were encrypted to a key in that - # recipient's possession. It is therefore normal to only be able - # to decrypt one of the per-recipient JWE Encrypted Key values to - # obtain the CEK value. Also, see Section 11.5 for security - # considerations on mitigating timing attacks. - if alg == ALGORITHMS.DIR: - # When Direct Key Agreement or Direct Encryption are employed, - # verify that the JWE Encrypted Key value is an empty octet - # sequence. - - # Record whether the CEK could be successfully determined for this - # recipient or not. - cek_valid = encrypted_key == b"" - - # When Direct Encryption is employed, let the CEK be the shared - # symmetric key. - cek_bytes = _get_key_bytes_from_key(key) - else: - try: - cek_bytes = key.unwrap_key(encrypted_key) - - # Record whether the CEK could be successfully determined for this - # recipient or not. - cek_valid = True - except NotImplementedError: - raise JWEError(f"alg {alg} is not implemented") - except Exception: - # Record whether the CEK could be successfully determined for this - # recipient or not. - cek_valid = False - - # To mitigate the attacks described in RFC 3218 [RFC3218], the - # recipient MUST NOT distinguish between format, padding, and length - # errors of encrypted keys.
It is strongly recommended, in the event - # of receiving an improperly formatted key, that the recipient - # substitute a randomly generated CEK and proceed to the next step, to - # mitigate timing attacks. - cek_bytes = _get_random_cek_bytes_for_enc(enc) - - # Compute the Encoded Protected Header value BASE64URL(UTF8(JWE - # Protected Header)). If the JWE Protected Header is not present - # (which can only happen when using the JWE JSON Serialization and - # no "protected" member is present), let this value be the empty - # string. - protected_header = encoded_header - - # Let the Additional Authenticated Data encryption parameter be - # ASCII(Encoded Protected Header). However, if a JWE AAD value is - # present (which can only be the case when using the JWE JSON - # Serialization), instead let the Additional Authenticated Data - # encryption parameter be ASCII(Encoded Protected Header || '.' || - # BASE64URL(JWE AAD)). - aad = protected_header - - # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization - # Vector, the Additional Authenticated Data value, and the JWE - # Authentication Tag (which is the Authentication Tag input to the - # calculation) using the specified content encryption algorithm, - # returning the decrypted plaintext and validating the JWE - # Authentication Tag in the manner specified for the algorithm, - # rejecting the input without emitting any decrypted output if the - # JWE Authentication Tag is incorrect. - try: - plain_text = _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag) - except NotImplementedError: - raise JWEError(f"enc {enc} is not implemented") - except Exception as e: - raise JWEError(e) - - # If a "zip" parameter was included, uncompress the decrypted - # plaintext using the specified compression algorithm. - if plain_text is not None: - plain_text = _decompress(header.get("zip"), plain_text) - - return plain_text if cek_valid else None - - -def get_unverified_header(jwe_str): - """Returns the decoded headers without verification of any kind. - - Args: - jwe_str (str): A compact serialized JWE to decode the headers from. - - Returns: - dict: The dict representation of the JWE headers. - - Raises: - JWEError: If there is an exception decoding the JWE. 
- """ - header = _jwe_compact_deserialize(jwe_str)[0] - return header - - -def _decrypt_and_auth(cek_bytes, enc, cipher_text, iv, aad, auth_tag): - """ - Decrypt and verify the data - - Args: - cek_bytes (bytes): cek to derive encryption and possible auth key to - verify the auth tag - cipher_text (bytes): Encrypted data - iv (bytes): Initialization vector (iv) used to encrypt data - aad (bytes): Additional Authenticated Data used to verify the data - auth_tag (bytes): Authentication ntag to verify the data - - Returns: - (bytes): Decrypted data - """ - # Decrypt the JWE Ciphertext using the CEK, the JWE Initialization - # Vector, the Additional Authenticated Data value, and the JWE - # Authentication Tag (which is the Authentication Tag input to the - # calculation) using the specified content encryption algorithm, - # returning the decrypted plaintext - # and validating the JWE - # Authentication Tag in the manner specified for the algorithm, - if enc in ALGORITHMS.HMAC_AUTH_TAG: - encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) - auth_tag_check = _auth_tag(cipher_text, iv, aad, mac_key, key_len) - elif enc in ALGORITHMS.GCM: - encryption_key = jwk.construct(cek_bytes, enc) - auth_tag_check = auth_tag # GCM check auth on decrypt - else: - raise NotImplementedError(f"enc {enc} is not implemented!") - - plaintext = encryption_key.decrypt(cipher_text, iv, aad, auth_tag) - if auth_tag != auth_tag_check: - raise JWEError("Invalid JWE Auth Tag") - - return plaintext - - -def _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc): - derived_key_len = len(cek_bytes) // 2 - mac_key_bytes = cek_bytes[0:derived_key_len] - mac_key = _get_hmac_key(enc, mac_key_bytes) - encryption_key_bytes = cek_bytes[-derived_key_len:] - encryption_alg, _ = enc.split("-") - encryption_key = jwk.construct(encryption_key_bytes, encryption_alg) - return encryption_key, mac_key, derived_key_len - - -def _jwe_compact_deserialize(jwe_bytes): - """ - Deserialize and verify the header and segments are appropriate. - - Args: - jwe_bytes (bytes): The compact serialized JWE - Returns: - (dict, bytes, bytes, bytes, bytes, bytes) - """ - - # Base64url decode the encoded representations of the JWE - # Protected Header, the JWE Encrypted Key, the JWE Initialization - # Vector, the JWE Ciphertext, the JWE Authentication Tag, and the - # JWE AAD, following the restriction that no line breaks, - # whitespace, or other additional characters have been used. - jwe_bytes = ensure_binary(jwe_bytes) - try: - header_segment, encrypted_key_segment, iv_segment, cipher_text_segment, auth_tag_segment = jwe_bytes.split( - b".", 4 - ) - header_data = base64url_decode(header_segment) - except ValueError: - raise JWEParseError("Not enough segments") - except (TypeError, binascii.Error): - raise JWEParseError("Invalid header") - - # Verify that the octet sequence resulting from decoding the - # encoded JWE Protected Header is a UTF-8-encoded representation - # of a completely valid JSON object conforming to RFC 7159 - # [RFC7159]; let the JWE Protected Header be this JSON object. - # - # If using the JWE Compact Serialization, let the JOSE Header be - # the JWE Protected Header. Otherwise, when using the JWE JSON - # Serialization, let the JOSE Header be the union of the members - # of the JWE Protected Header, the JWE Shared Unprotected Header - # and the corresponding JWE Per-Recipient Unprotected Header, all - # of which must be completely valid JSON objects. 
During this - # step, verify that the resulting JOSE Header does not contain - # duplicate Header Parameter names. When using the JWE JSON - # Serialization, this restriction includes that the same Header - # Parameter name also MUST NOT occur in distinct JSON object - # values that together comprise the JOSE Header. - - try: - header = json.loads(header_data) - except ValueError as e: - raise JWEParseError(f"Invalid header string: {e}") - - if not isinstance(header, Mapping): - raise JWEParseError("Invalid header string: must be a json object") - - try: - encrypted_key = base64url_decode(encrypted_key_segment) - except (TypeError, binascii.Error): - raise JWEParseError("Invalid encrypted key") - - try: - iv = base64url_decode(iv_segment) - except (TypeError, binascii.Error): - raise JWEParseError("Invalid IV") - - try: - ciphertext = base64url_decode(cipher_text_segment) - except (TypeError, binascii.Error): - raise JWEParseError("Invalid cyphertext") - - try: - auth_tag = base64url_decode(auth_tag_segment) - except (TypeError, binascii.Error): - raise JWEParseError("Invalid auth tag") - - return header, header_segment, encrypted_key, iv, ciphertext, auth_tag - - -def _encoded_header(alg, enc, zip, cty, kid): - """ - Generate an appropriate JOSE header based on the values provided - Args: - alg (str): Key wrap/negotiation algorithm - enc (str): Encryption algorithm - zip (str): Compression method - cty (str): Content type of the encrypted data - kid (str): ID for the key used for the operation - - Returns: - bytes: JSON object of header based on input - """ - header = {"alg": alg, "enc": enc} - if zip: - header["zip"] = zip - if cty: - header["cty"] = cty - if kid: - header["kid"] = kid - json_header = json.dumps( - header, - separators=(",", ":"), - sort_keys=True, - ).encode("utf-8") - return base64url_encode(json_header) - - -def _big_endian(int_val): - return pack("!Q", int_val) - - -def _encrypt_and_auth(key, alg, enc, zip, plaintext, aad): - """ - Generate a content encryption key (cek) and initialization - vector (iv) based on enc and alg, compress the plaintext based on zip, - encrypt the compressed plaintext using the cek and iv based on enc - - Args: - key (Key): The key provided for encryption - alg (str): The algorithm use for key wrap/negotiation - enc (str): The encryption algorithm with which to encrypt the plaintext - zip (str): The compression algorithm with which to compress the plaintext - plaintext (bytes): The data to encrypt - aad (str): Additional authentication data utilized for generating an - auth tag - - Returns: - (bytes, bytes, bytes, bytes): A tuple of the following data - (key wrapped cek, iv, cipher text, auth tag) - """ - try: - cek_bytes, kw_cek = _get_cek(enc, alg, key) - except NotImplementedError: - raise JWEError(f"alg {alg} is not implemented") - - if enc in ALGORITHMS.HMAC_AUTH_TAG: - encryption_key, mac_key, key_len = _get_encryption_key_mac_key_and_key_length_from_cek(cek_bytes, enc) - iv, ciphertext, tag = encryption_key.encrypt(plaintext, aad) - auth_tag = _auth_tag(ciphertext, iv, aad, mac_key, key_len) - elif enc in ALGORITHMS.GCM: - encryption_key = jwk.construct(cek_bytes, enc) - iv, ciphertext, auth_tag = encryption_key.encrypt(plaintext, aad) - else: - raise NotImplementedError(f"enc {enc} is not implemented!") - - return kw_cek, iv, ciphertext, auth_tag - - -def _get_hmac_key(enc, mac_key_bytes): - """ - Get an HMACKey for the provided encryption algorithm and key bytes - - Args: - enc (str): Encryption algorithm - mac_key_bytes (bytes): 
Bytes for the HMAC key - - Returns: - (HMACKey): The key to perform HMAC actions - """ - _, hash_alg = enc.split("-") - mac_key = jwk.construct(mac_key_bytes, hash_alg) - return mac_key - - -def _compress(zip, plaintext): - """ - Compress the plaintext based on the algorithm supplied - - Args: - zip (str): Compression Algorithm - plaintext (bytes): plaintext to compress - - Returns: - (bytes): Compressed plaintext - """ - if zip not in ZIPS.SUPPORTED: - raise NotImplementedError(f"ZIP {zip} is not supported!") - if zip is None: - compressed = plaintext - elif zip == ZIPS.DEF: - compressed = zlib.compress(plaintext) - else: - raise NotImplementedError(f"ZIP {zip} is not implemented!") - return compressed - - -def _decompress(zip, compressed): - """ - Decompress the plaintext based on the algorithm supplied - - Args: - zip (str): Compression Algorithm - compressed (bytes): data to decompress - - Returns: - (bytes): Decompressed plaintext - """ - if zip not in ZIPS.SUPPORTED: - raise NotImplementedError(f"ZIP {zip} is not supported!") - if zip is None: - decompressed = compressed - elif zip == ZIPS.DEF: - # If, during decompression, there is more data than expected, the - # decompression halts and raises an error. This addresses CVE-2024-33664 - decompressor = zlib.decompressobj() - decompressed = decompressor.decompress(compressed, max_length=JWE_SIZE_LIMIT) - if decompressor.unconsumed_tail: - raise JWEError(f"Decompressed JWE string exceeds {JWE_SIZE_LIMIT} bytes") - else: - raise NotImplementedError(f"ZIP {zip} is not implemented!") - return decompressed - - -def _get_cek(enc, alg, key): - """ - Get the content encryption key - - Args: - enc (str): Encryption algorithm - alg (str): key wrap/negotiation algorithm - key (Key): Key provided to encryption method - - Return: - (bytes, bytes): Tuple of (cek bytes and wrapped cek) - """ - if alg == ALGORITHMS.DIR: - cek, wrapped_cek = _get_direct_key_wrap_cek(key) - else: - cek, wrapped_cek = _get_key_wrap_cek(enc, key) - - return cek, wrapped_cek - - -def _get_direct_key_wrap_cek(key): - """ - Get the cek and wrapped cek directly from the encryption key - - Args: - key (Key): Key provided to encryption method - - Return: - (bytes, bytes): Tuple of (cek bytes and wrapped cek) - """ - # Get the JWK data to determine how to derive the cek - jwk_data = key.to_dict() - if jwk_data["kty"] == "oct": - # Use the raw bytes of the 'oct' key as the cek - cek_bytes = _get_key_bytes_from_key(key) - wrapped_cek = b"" - else: - raise NotImplementedError("JWK type {} not supported!".format(jwk_data["kty"])) - return cek_bytes, wrapped_cek - - -def _get_key_bytes_from_key(key): - """ - Get the raw key bytes from a Key object - - Args: - key (Key): Key from which to extract the raw key bytes - Returns: - (bytes) key data - """ - jwk_data = key.to_dict() - encoded_key = jwk_data["k"] - cek_bytes = base64url_decode(encoded_key) - return cek_bytes - - -def _get_key_wrap_cek(enc, key): - """Get the content encryption key for key wrap - - Args: - enc (str): Encryption algorithm - key (Key): Key provided to encryption method - - Returns: - (bytes, bytes): Tuple of (cek bytes and wrapped cek) - """ - cek_bytes = _get_random_cek_bytes_for_enc(enc) - wrapped_cek = key.wrap_key(cek_bytes) - return cek_bytes, wrapped_cek - - -def _get_random_cek_bytes_for_enc(enc): - """ - Get the random cek bytes based on the encryption algorithm - - Args: - enc (str): Encryption algorithm - - Returns: - (bytes) random bytes for cek key - """ - if enc ==
ALGORITHMS.A128GCM: - num_bits = 128 - elif enc == ALGORITHMS.A192GCM: - num_bits = 192 - elif enc in (ALGORITHMS.A128CBC_HS256, ALGORITHMS.A256GCM): - num_bits = 256 - elif enc == ALGORITHMS.A192CBC_HS384: - num_bits = 384 - elif enc == ALGORITHMS.A256CBC_HS512: - num_bits = 512 - else: - raise NotImplementedError(f"{enc} not supported") - cek_bytes = get_random_bytes(num_bits // 8) - return cek_bytes - - -def _auth_tag(ciphertext, iv, aad, mac_key, tag_length): - """ - Get an auth tag from the provided data - - Args: - ciphertext (bytes): Encrypted value - iv (bytes): Initialization vector - aad (bytes): Additional Authenticated Data - mac_key (Key): Key to use in generating the MAC - tag_length (int): How long the tag should be - - Returns: - (bytes) Auth tag - """ - al = _big_endian(len(aad) * 8) - auth_tag_input = aad + iv + ciphertext + al - signature = mac_key.sign(auth_tag_input) - auth_tag = signature[0:tag_length] - return auth_tag - - -def _jwe_compact_serialize(encoded_header, encrypted_cek, iv, cipher_text, auth_tag): - """ - Generate a compact serialized JWE - - Args: - encoded_header (bytes): Base64 URL Encoded JWE header JSON - encrypted_cek (bytes): Encrypted content encryption key (cek) - iv (bytes): Initialization vector (IV) - cipher_text (bytes): Cipher text - auth_tag (bytes): JWE Auth Tag - - Returns: - (str): JWE compact serialized string - """ - cipher_text = ensure_binary(cipher_text) - encoded_encrypted_cek = base64url_encode(encrypted_cek) - encoded_iv = base64url_encode(iv) - encoded_cipher_text = base64url_encode(cipher_text) - encoded_auth_tag = base64url_encode(auth_tag) - return ( - encoded_header - + b"." - + encoded_encrypted_cek - + b"." - + encoded_iv - + b"." - + encoded_cipher_text - + b"." - + encoded_auth_tag - ) diff --git a/venv/lib/python3.12/site-packages/jose/jwk.py b/venv/lib/python3.12/site-packages/jose/jwk.py deleted file mode 100644 index 2a318475..00000000 --- a/venv/lib/python3.12/site-packages/jose/jwk.py +++ /dev/null @@ -1,79 +0,0 @@ -from jose.backends.base import Key -from jose.constants import ALGORITHMS -from jose.exceptions import JWKError - -try: - from jose.backends import RSAKey # noqa: F401 -except ImportError: - pass - -try: - from jose.backends import ECKey # noqa: F401 -except ImportError: - pass - -try: - from jose.backends import AESKey # noqa: F401 -except ImportError: - pass - -try: - from jose.backends import DIRKey # noqa: F401 -except ImportError: - pass - -try: - from jose.backends import HMACKey # noqa: F401 -except ImportError: - pass - - -def get_key(algorithm): - if algorithm in ALGORITHMS.KEYS: - return ALGORITHMS.KEYS[algorithm] - elif algorithm in ALGORITHMS.HMAC: # noqa: F811 - return HMACKey - elif algorithm in ALGORITHMS.RSA: - from jose.backends import RSAKey # noqa: F811 - - return RSAKey - elif algorithm in ALGORITHMS.EC: - from jose.backends import ECKey # noqa: F811 - - return ECKey - elif algorithm in ALGORITHMS.AES: - from jose.backends import AESKey # noqa: F811 - - return AESKey - elif algorithm == ALGORITHMS.DIR: - from jose.backends import DIRKey # noqa: F811 - - return DIRKey - return None - - -def register_key(algorithm, key_class): - if not issubclass(key_class, Key): - raise TypeError("Key class is not a subclass of jwk.Key") - ALGORITHMS.KEYS[algorithm] = key_class - ALGORITHMS.SUPPORTED.add(algorithm) - return True - - -def construct(key_data, algorithm=None): - """ - Construct a Key object for the given algorithm with the given - key_data.
- """ - - # Allow for pulling the algorithm off of the passed in jwk. - if not algorithm and isinstance(key_data, dict): - algorithm = key_data.get("alg", None) - - if not algorithm: - raise JWKError("Unable to find an algorithm for key") - - key_class = get_key(algorithm) - if not key_class: - raise JWKError("Unable to find an algorithm for key") - return key_class(key_data, algorithm) diff --git a/venv/lib/python3.12/site-packages/jose/jws.py b/venv/lib/python3.12/site-packages/jose/jws.py deleted file mode 100644 index 27f6b79a..00000000 --- a/venv/lib/python3.12/site-packages/jose/jws.py +++ /dev/null @@ -1,268 +0,0 @@ -import binascii -import json - -try: - from collections.abc import Iterable, Mapping -except ImportError: - from collections import Mapping, Iterable - -from jose import jwk -from jose.backends.base import Key -from jose.constants import ALGORITHMS -from jose.exceptions import JWSError, JWSSignatureError -from jose.utils import base64url_decode, base64url_encode - - -def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256): - """Signs a claims set and returns a JWS string. - - Args: - payload (str or dict): A string to sign - key (str or dict): The key to use for signing the claim set. Can be - individual JWK or JWK set. - headers (dict, optional): A set of headers that will be added to - the default headers. Any headers that are added as additional - headers will override the default headers. - algorithm (str, optional): The algorithm to use for signing the - the claims. Defaults to HS256. - - Returns: - str: The string representation of the header, claims, and signature. - - Raises: - JWSError: If there is an error signing the token. - - Examples: - - >>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256') - 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' - - """ - - if algorithm not in ALGORITHMS.SUPPORTED: - raise JWSError("Algorithm %s not supported." % algorithm) - - encoded_header = _encode_header(algorithm, additional_headers=headers) - encoded_payload = _encode_payload(payload) - signed_output = _sign_header_and_claims(encoded_header, encoded_payload, algorithm, key) - - return signed_output - - -def verify(token, key, algorithms, verify=True): - """Verifies a JWS string's signature. - - Args: - token (str): A signed JWS to be verified. - key (str or dict): A key to attempt to verify the payload with. Can be - individual JWK or JWK set. - algorithms (str or list): Valid algorithms that should be used to verify the JWS. - - Returns: - str: The str representation of the payload, assuming the signature is valid. - - Raises: - JWSError: If there is an exception verifying a token. - - Examples: - - >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' - >>> jws.verify(token, 'secret', algorithms='HS256') - - """ - - header, payload, signing_input, signature = _load(token) - - if verify: - _verify_signature(signing_input, header, signature, key, algorithms) - - return payload - - -def get_unverified_header(token): - """Returns the decoded headers without verification of any kind. - - Args: - token (str): A signed JWS to decode the headers from. - - Returns: - dict: The dict representation of the token headers. - - Raises: - JWSError: If there is an exception decoding the token. - """ - header, claims, signing_input, signature = _load(token) - return header - - -def get_unverified_headers(token): - """Returns the decoded headers without verification of any kind. 
- - This is simply a wrapper of get_unverified_header() for backwards - compatibility. - - Args: - token (str): A signed JWS to decode the headers from. - - Returns: - dict: The dict representation of the token headers. - - Raises: - JWSError: If there is an exception decoding the token. - """ - return get_unverified_header(token) - - -def get_unverified_claims(token): - """Returns the decoded claims without verification of any kind. - - Args: - token (str): A signed JWS to decode the headers from. - - Returns: - str: The str representation of the token claims. - - Raises: - JWSError: If there is an exception decoding the token. - """ - header, claims, signing_input, signature = _load(token) - return claims - - -def _encode_header(algorithm, additional_headers=None): - header = {"typ": "JWT", "alg": algorithm} - - if additional_headers: - header.update(additional_headers) - - json_header = json.dumps( - header, - separators=(",", ":"), - sort_keys=True, - ).encode("utf-8") - - return base64url_encode(json_header) - - -def _encode_payload(payload): - if isinstance(payload, Mapping): - try: - payload = json.dumps( - payload, - separators=(",", ":"), - ).encode("utf-8") - except ValueError: - pass - - return base64url_encode(payload) - - -def _sign_header_and_claims(encoded_header, encoded_claims, algorithm, key): - signing_input = b".".join([encoded_header, encoded_claims]) - try: - if not isinstance(key, Key): - key = jwk.construct(key, algorithm) - signature = key.sign(signing_input) - except Exception as e: - raise JWSError(e) - - encoded_signature = base64url_encode(signature) - - encoded_string = b".".join([encoded_header, encoded_claims, encoded_signature]) - - return encoded_string.decode("utf-8") - - -def _load(jwt): - if isinstance(jwt, str): - jwt = jwt.encode("utf-8") - try: - signing_input, crypto_segment = jwt.rsplit(b".", 1) - header_segment, claims_segment = signing_input.split(b".", 1) - header_data = base64url_decode(header_segment) - except ValueError: - raise JWSError("Not enough segments") - except (TypeError, binascii.Error): - raise JWSError("Invalid header padding") - - try: - header = json.loads(header_data.decode("utf-8")) - except ValueError as e: - raise JWSError("Invalid header string: %s" % e) - - if not isinstance(header, Mapping): - raise JWSError("Invalid header string: must be a json object") - - try: - payload = base64url_decode(claims_segment) - except (TypeError, binascii.Error): - raise JWSError("Invalid payload padding") - - try: - signature = base64url_decode(crypto_segment) - except (TypeError, binascii.Error): - raise JWSError("Invalid crypto padding") - - return (header, payload, signing_input, signature) - - -def _sig_matches_keys(keys, signing_input, signature, alg): - for key in keys: - if not isinstance(key, Key): - key = jwk.construct(key, alg) - try: - if key.verify(signing_input, signature): - return True - except Exception: - pass - return False - - -def _get_keys(key): - if isinstance(key, Key): - return (key,) - - try: - key = json.loads(key, parse_int=str, parse_float=str) - except Exception: - pass - - if isinstance(key, Mapping): - if "keys" in key: - # JWK Set per RFC 7517 - return key["keys"] - elif "kty" in key: - # Individual JWK per RFC 7517 - return (key,) - else: - # Some other mapping. 
Firebase uses just dict of kid, cert pairs - values = key.values() - if values: - return values - return (key,) - - # Iterable but not text or mapping => list- or tuple-like - elif isinstance(key, Iterable) and not (isinstance(key, str) or isinstance(key, bytes)): - return key - - # Scalar value, wrap in tuple. - else: - return (key,) - - -def _verify_signature(signing_input, header, signature, key="", algorithms=None): - alg = header.get("alg") - if not alg: - raise JWSError("No algorithm was specified in the JWS header.") - - if algorithms is not None and alg not in algorithms: - raise JWSError("The specified alg value is not allowed") - - keys = _get_keys(key) - try: - if not _sig_matches_keys(keys, signing_input, signature, alg): - raise JWSSignatureError() - except JWSSignatureError: - raise JWSError("Signature verification failed.") - except JWSError: - raise JWSError("Invalid or unsupported algorithm: %s" % alg) diff --git a/venv/lib/python3.12/site-packages/jose/jwt.py b/venv/lib/python3.12/site-packages/jose/jwt.py deleted file mode 100644 index f47e4ddf..00000000 --- a/venv/lib/python3.12/site-packages/jose/jwt.py +++ /dev/null @@ -1,512 +0,0 @@ -import json -from calendar import timegm -from datetime import datetime, timedelta - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping - -try: - from datetime import UTC # Preferred in Python 3.13+ -except ImportError: - from datetime import timezone - - UTC = timezone.utc # Preferred in Python 3.12 and below - -from jose import jws - -from .constants import ALGORITHMS -from .exceptions import ExpiredSignatureError, JWSError, JWTClaimsError, JWTError -from .utils import calculate_at_hash, timedelta_total_seconds - - -def encode(claims, key, algorithm=ALGORITHMS.HS256, headers=None, access_token=None): - """Encodes a claims set and returns a JWT string. - - JWTs are JWS signed objects with a few reserved claims. - - Args: - claims (dict): A claims set to sign - key (str or dict): The key to use for signing the claim set. Can be - individual JWK or JWK set. - algorithm (str, optional): The algorithm to use for signing - the claims. Defaults to HS256. - headers (dict, optional): A set of headers that will be added to - the default headers. Any headers that are added as additional - headers will override the default headers. - access_token (str, optional): If present, the 'at_hash' claim will - be calculated and added to the claims present in the 'claims' - parameter. - - Returns: - str: The string representation of the header, claims, and signature. - - Raises: - JWTError: If there is an error encoding the claims. - - Examples: - - >>> jwt.encode({'a': 'b'}, 'secret', algorithm='HS256') - 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' - - """ - - for time_claim in ["exp", "iat", "nbf"]: - # Convert datetime to an intDate value in known time-format claims - if isinstance(claims.get(time_claim), datetime): - claims[time_claim] = timegm(claims[time_claim].utctimetuple()) - - if access_token: - claims["at_hash"] = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) - - return jws.sign(claims, key, headers=headers, algorithm=algorithm) - - -def decode(token, key, algorithms=None, options=None, audience=None, issuer=None, subject=None, access_token=None): - """Verifies a JWT string's signature and validates reserved claims. - - Args: - token (str): A signed JWS to be verified.
- key (str or iterable): A key to attempt to verify the payload with. - This can be simple string with an individual key (e.g. "a1234"), - a tuple or list of keys (e.g. ("a1234...", "b3579"), - a JSON string, (e.g. '["a1234", "b3579"]'), - a dict with the 'keys' key that gives a tuple or list of keys (e.g {'keys': [...]} ) or - a dict or JSON string for a JWK set as defined by RFC 7517 (e.g. - {'keys': [{'kty': 'oct', 'k': 'YTEyMzQ'}, {'kty': 'oct', 'k':'YjM1Nzk'}]} or - '{"keys": [{"kty":"oct","k":"YTEyMzQ"},{"kty":"oct","k":"YjM1Nzk"}]}' - ) in which case the keys must be base64 url safe encoded (with optional padding). - algorithms (str or list): Valid algorithms that should be used to verify the JWS. - audience (str): The intended audience of the token. If the "aud" claim is - included in the claim set, then the audience must be included and must equal - the provided claim. - issuer (str or iterable): Acceptable value(s) for the issuer of the token. - If the "iss" claim is included in the claim set, then the issuer must be - given and the claim in the token must be among the acceptable values. - subject (str): The subject of the token. If the "sub" claim is - included in the claim set, then the subject must be included and must equal - the provided claim. - access_token (str): An access token string. If the "at_hash" claim is included in the - claim set, then the access_token must be included, and it must match - the "at_hash" claim. - options (dict): A dictionary of options for skipping validation steps. - - defaults = { - 'verify_signature': True, - 'verify_aud': True, - 'verify_iat': True, - 'verify_exp': True, - 'verify_nbf': True, - 'verify_iss': True, - 'verify_sub': True, - 'verify_jti': True, - 'verify_at_hash': True, - 'require_aud': False, - 'require_iat': False, - 'require_exp': False, - 'require_nbf': False, - 'require_iss': False, - 'require_sub': False, - 'require_jti': False, - 'require_at_hash': False, - 'leeway': 0, - } - - Returns: - dict: The dict representation of the claims set, assuming the signature is valid - and all requested data validation passes. - - Raises: - JWTError: If the signature is invalid in any way. - ExpiredSignatureError: If the signature has expired. - JWTClaimsError: If any claim is invalid in any way. 
- - Examples: - - >>> payload = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8' - >>> jwt.decode(payload, 'secret', algorithms='HS256') - - """ - - defaults = { - "verify_signature": True, - "verify_aud": True, - "verify_iat": True, - "verify_exp": True, - "verify_nbf": True, - "verify_iss": True, - "verify_sub": True, - "verify_jti": True, - "verify_at_hash": True, - "require_aud": False, - "require_iat": False, - "require_exp": False, - "require_nbf": False, - "require_iss": False, - "require_sub": False, - "require_jti": False, - "require_at_hash": False, - "leeway": 0, - } - - if options: - defaults.update(options) - - verify_signature = defaults.get("verify_signature", True) - - try: - payload = jws.verify(token, key, algorithms, verify=verify_signature) - except JWSError as e: - raise JWTError(e) - - # Needed for at_hash verification - algorithm = jws.get_unverified_header(token)["alg"] - - try: - claims = json.loads(payload.decode("utf-8")) - except ValueError as e: - raise JWTError("Invalid payload string: %s" % e) - - if not isinstance(claims, Mapping): - raise JWTError("Invalid payload string: must be a json object") - - _validate_claims( - claims, - audience=audience, - issuer=issuer, - subject=subject, - algorithm=algorithm, - access_token=access_token, - options=defaults, - ) - - return claims - - -def get_unverified_header(token): - """Returns the decoded headers without verification of any kind. - - Args: - token (str): A signed JWT to decode the headers from. - - Returns: - dict: The dict representation of the token headers. - - Raises: - JWTError: If there is an exception decoding the token. - """ - try: - headers = jws.get_unverified_headers(token) - except Exception: - raise JWTError("Error decoding token headers.") - - return headers - - -def get_unverified_headers(token): - """Returns the decoded headers without verification of any kind. - - This is simply a wrapper of get_unverified_header() for backwards - compatibility. - - Args: - token (str): A signed JWT to decode the headers from. - - Returns: - dict: The dict representation of the token headers. - - Raises: - JWTError: If there is an exception decoding the token. - """ - return get_unverified_header(token) - - -def get_unverified_claims(token): - """Returns the decoded claims without verification of any kind. - - Args: - token (str): A signed JWT to decode the headers from. - - Returns: - dict: The dict representation of the token claims. - - Raises: - JWTError: If there is an exception decoding the token. - """ - try: - claims = jws.get_unverified_claims(token) - except Exception: - raise JWTError("Error decoding token claims.") - - try: - claims = json.loads(claims.decode("utf-8")) - except ValueError as e: - raise JWTError("Invalid claims string: %s" % e) - - if not isinstance(claims, Mapping): - raise JWTError("Invalid claims string: must be a json object") - - return claims - - -def _validate_iat(claims): - """Validates that the 'iat' claim is valid. - - The "iat" (issued at) claim identifies the time at which the JWT was - issued. This claim can be used to determine the age of the JWT. Its - value MUST be a number containing a NumericDate value. Use of this - claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. 
- """ - - if "iat" not in claims: - return - - try: - int(claims["iat"]) - except ValueError: - raise JWTClaimsError("Issued At claim (iat) must be an integer.") - - -def _validate_nbf(claims, leeway=0): - """Validates that the 'nbf' claim is valid. - - The "nbf" (not before) claim identifies the time before which the JWT - MUST NOT be accepted for processing. The processing of the "nbf" - claim requires that the current date/time MUST be after or equal to - the not-before date/time listed in the "nbf" claim. Implementers MAY - provide for some small leeway, usually no more than a few minutes, to - account for clock skew. Its value MUST be a number containing a - NumericDate value. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - leeway (int): The number of seconds of skew that is allowed. - """ - - if "nbf" not in claims: - return - - try: - nbf = int(claims["nbf"]) - except ValueError: - raise JWTClaimsError("Not Before claim (nbf) must be an integer.") - - now = timegm(datetime.now(UTC).utctimetuple()) - - if nbf > (now + leeway): - raise JWTClaimsError("The token is not yet valid (nbf)") - - -def _validate_exp(claims, leeway=0): - """Validates that the 'exp' claim is valid. - - The "exp" (expiration time) claim identifies the expiration time on - or after which the JWT MUST NOT be accepted for processing. The - processing of the "exp" claim requires that the current date/time - MUST be before the expiration date/time listed in the "exp" claim. - Implementers MAY provide for some small leeway, usually no more than - a few minutes, to account for clock skew. Its value MUST be a number - containing a NumericDate value. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - leeway (int): The number of seconds of skew that is allowed. - """ - - if "exp" not in claims: - return - - try: - exp = int(claims["exp"]) - except ValueError: - raise JWTClaimsError("Expiration Time claim (exp) must be an integer.") - - now = timegm(datetime.now(UTC).utctimetuple()) - - if exp < (now - leeway): - raise ExpiredSignatureError("Signature has expired.") - - -def _validate_aud(claims, audience=None): - """Validates that the 'aud' claim is valid. - - The "aud" (audience) claim identifies the recipients that the JWT is - intended for. Each principal intended to process the JWT MUST - identify itself with a value in the audience claim. If the principal - processing the claim does not identify itself with a value in the - "aud" claim when this claim is present, then the JWT MUST be - rejected. In the general case, the "aud" value is an array of case- - sensitive strings, each containing a StringOrURI value. In the - special case when the JWT has one audience, the "aud" value MAY be a - single case-sensitive string containing a StringOrURI value. The - interpretation of audience values is generally application specific. - Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - audience (str): The audience that is verifying the token. 
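For illustration (this example is not part of the original python-jose source): the audience check implemented just below accepts either a single string or a list of strings for the `aud` claim, and requires the expected audience to be a member. A quick sketch, assuming the `python-jose` package is installed and noting that `_validate_aud` is a private helper:

```python
from jose.exceptions import JWTClaimsError
from jose.jwt import _validate_aud  # private helper, shown in this diff

_validate_aud({"aud": "api://default"}, audience="api://default")  # single-string form: passes
_validate_aud({"aud": ["svc-a", "svc-b"]}, audience="svc-b")  # list form: membership check passes

try:
    _validate_aud({"aud": ["svc-a"]}, audience="svc-c")
except JWTClaimsError as exc:
    print(exc)  # Invalid audience
```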
- """ - - if "aud" not in claims: - # if audience: - # raise JWTError('Audience claim expected, but not in claims') - return - - audience_claims = claims["aud"] - if isinstance(audience_claims, str): - audience_claims = [audience_claims] - if not isinstance(audience_claims, list): - raise JWTClaimsError("Invalid claim format in token") - if any(not isinstance(c, str) for c in audience_claims): - raise JWTClaimsError("Invalid claim format in token") - if audience not in audience_claims: - raise JWTClaimsError("Invalid audience") - - -def _validate_iss(claims, issuer=None): - """Validates that the 'iss' claim is valid. - - The "iss" (issuer) claim identifies the principal that issued the - JWT. The processing of this claim is generally application specific. - The "iss" value is a case-sensitive string containing a StringOrURI - value. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - issuer (str or iterable): Acceptable value(s) for the issuer that - signed the token. - """ - - if issuer is not None: - if isinstance(issuer, str): - issuer = (issuer,) - if claims.get("iss") not in issuer: - raise JWTClaimsError("Invalid issuer") - - -def _validate_sub(claims, subject=None): - """Validates that the 'sub' claim is valid. - - The "sub" (subject) claim identifies the principal that is the - subject of the JWT. The claims in a JWT are normally statements - about the subject. The subject value MUST either be scoped to be - locally unique in the context of the issuer or be globally unique. - The processing of this claim is generally application specific. The - "sub" value is a case-sensitive string containing a StringOrURI - value. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - subject (str): The subject of the token. - """ - - if "sub" not in claims: - return - - if not isinstance(claims["sub"], str): - raise JWTClaimsError("Subject must be a string.") - - if subject is not None: - if claims.get("sub") != subject: - raise JWTClaimsError("Invalid subject") - - -def _validate_jti(claims): - """Validates that the 'jti' claim is valid. - - The "jti" (JWT ID) claim provides a unique identifier for the JWT. - The identifier value MUST be assigned in a manner that ensures that - there is a negligible probability that the same value will be - accidentally assigned to a different data object; if the application - uses multiple issuers, collisions MUST be prevented among values - produced by different issuers as well. The "jti" claim can be used - to prevent the JWT from being replayed. The "jti" value is a case- - sensitive string. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate. - """ - if "jti" not in claims: - return - - if not isinstance(claims["jti"], str): - raise JWTClaimsError("JWT ID must be a string.") - - -def _validate_at_hash(claims, access_token, algorithm): - """ - Validates that the 'at_hash' is valid. - - Its value is the base64url encoding of the left-most half of the hash - of the octets of the ASCII representation of the access_token value, - where the hash algorithm used is the hash algorithm used in the alg - Header Parameter of the ID Token's JOSE Header. For instance, if the - alg is RS256, hash the access_token value with SHA-256, then take the - left-most 128 bits and base64url encode them. The at_hash value is a - case sensitive string. Use of this claim is OPTIONAL. - - Args: - claims (dict): The claims dictionary to validate.
- access_token (str): The access token returned by the OpenID Provider. - algorithm (str): The algorithm used to sign the JWT, as specified by - the token headers. - """ - if "at_hash" not in claims: - return - - if not access_token: - msg = "No access_token provided to compare against at_hash claim." - raise JWTClaimsError(msg) - - try: - expected_hash = calculate_at_hash(access_token, ALGORITHMS.HASHES[algorithm]) - except (TypeError, ValueError): - msg = "Unable to calculate at_hash to verify against token claims." - raise JWTClaimsError(msg) - - if claims["at_hash"] != expected_hash: - raise JWTClaimsError("at_hash claim does not match access_token.") - - -def _validate_claims(claims, audience=None, issuer=None, subject=None, algorithm=None, access_token=None, options=None): - leeway = options.get("leeway", 0) - - if isinstance(leeway, timedelta): - leeway = timedelta_total_seconds(leeway) - required_claims = [e[len("require_") :] for e in options.keys() if e.startswith("require_") and options[e]] - for require_claim in required_claims: - if require_claim not in claims: - raise JWTError('missing required key "%s" among claims' % require_claim) - else: - options["verify_" + require_claim] = True # override verify when required - - if not isinstance(audience, (str, type(None))): - raise JWTError("audience must be a string or None") - - if options.get("verify_iat"): - _validate_iat(claims) - - if options.get("verify_nbf"): - _validate_nbf(claims, leeway=leeway) - - if options.get("verify_exp"): - _validate_exp(claims, leeway=leeway) - - if options.get("verify_aud"): - _validate_aud(claims, audience=audience) - - if options.get("verify_iss"): - _validate_iss(claims, issuer=issuer) - - if options.get("verify_sub"): - _validate_sub(claims, subject=subject) - - if options.get("verify_jti"): - _validate_jti(claims) - - if options.get("verify_at_hash"): - _validate_at_hash(claims, access_token, algorithm) diff --git a/venv/lib/python3.12/site-packages/jose/utils.py b/venv/lib/python3.12/site-packages/jose/utils.py deleted file mode 100644 index d62cafb0..00000000 --- a/venv/lib/python3.12/site-packages/jose/utils.py +++ /dev/null @@ -1,165 +0,0 @@ -import base64 -import re -import struct - -# Piggyback of the backends implementation of the function that converts a long -# to a bytes stream. Some plumbing is necessary to have the signatures match.
-try: - from cryptography.utils import int_to_bytes as _long_to_bytes - - def long_to_bytes(n, blocksize=0): - return _long_to_bytes(n, blocksize or None) - -except ImportError: - from ecdsa.ecdsa import int_to_string as _long_to_bytes - - def long_to_bytes(n, blocksize=0): - ret = _long_to_bytes(n) - if blocksize == 0: - return ret - else: - assert len(ret) <= blocksize - padding = blocksize - len(ret) - return b"\x00" * padding + ret - - -def long_to_base64(data, size=0): - return base64.urlsafe_b64encode(long_to_bytes(data, size)).strip(b"=") - - -def int_arr_to_long(arr): - return int("".join(["%02x" % byte for byte in arr]), 16) - - -def base64_to_long(data): - if isinstance(data, str): - data = data.encode("ascii") - - # urlsafe_b64decode will happily convert b64encoded data - _d = base64.urlsafe_b64decode(bytes(data) + b"==") - return int_arr_to_long(struct.unpack("%sB" % len(_d), _d)) - - -def calculate_at_hash(access_token, hash_alg): - """Helper method for calculating an access token - hash, as described in http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken - - Its value is the base64url encoding of the left-most half of the hash of the octets - of the ASCII representation of the access_token value, where the hash algorithm - used is the hash algorithm used in the alg Header Parameter of the ID Token's JOSE - Header. For instance, if the alg is RS256, hash the access_token value with SHA-256, - then take the left-most 128 bits and base64url encode them. The at_hash value is a - case sensitive string. - - Args: - access_token (str): An access token string. - hash_alg (callable): A callable returning a hash object, e.g. hashlib.sha256 - - """ - hash_digest = hash_alg(access_token.encode("utf-8")).digest() - cut_at = int(len(hash_digest) / 2) - truncated = hash_digest[:cut_at] - at_hash = base64url_encode(truncated) - return at_hash.decode("utf-8") - - -def base64url_decode(input): - """Helper method to base64url_decode a string. - - Args: - input (bytes): A base64url_encoded string (bytes) to decode. - - """ - rem = len(input) % 4 - - if rem > 0: - input += b"=" * (4 - rem) - - return base64.urlsafe_b64decode(input) - - -def base64url_encode(input): - """Helper method to base64url_encode a string. - - Args: - input (bytes): The string (bytes) to encode. - - """ - return base64.urlsafe_b64encode(input).replace(b"=", b"") - - -def timedelta_total_seconds(delta): - """Helper method to determine the total number of seconds - from a timedelta. - - Args: - delta (timedelta): A timedelta to convert to seconds.
- """ - return delta.days * 24 * 60 * 60 + delta.seconds - - -def ensure_binary(s): - """Coerce **s** to bytes.""" - - if isinstance(s, bytes): - return s - if isinstance(s, str): - return s.encode("utf-8", "strict") - raise TypeError(f"not expecting type '{type(s)}'") - - -# The following was copied from PyJWT: -# https://github.com/jpadilla/pyjwt/commit/9c528670c455b8d948aff95ed50e22940d1ad3fc -# Based on: -# https://github.com/hynek/pem/blob/7ad94db26b0bc21d10953f5dbad3acfdfacf57aa/src/pem/_core.py#L224-L252 -_PEMS = { - b"CERTIFICATE", - b"TRUSTED CERTIFICATE", - b"PRIVATE KEY", - b"PUBLIC KEY", - b"ENCRYPTED PRIVATE KEY", - b"OPENSSH PRIVATE KEY", - b"DSA PRIVATE KEY", - b"RSA PRIVATE KEY", - b"RSA PUBLIC KEY", - b"EC PRIVATE KEY", - b"DH PARAMETERS", - b"NEW CERTIFICATE REQUEST", - b"CERTIFICATE REQUEST", - b"SSH2 PUBLIC KEY", - b"SSH2 ENCRYPTED PRIVATE KEY", - b"X509 CRL", -} -_PEM_RE = re.compile( - b"----[- ]BEGIN (" + b"|".join(re.escape(pem) for pem in _PEMS) + b")[- ]----", -) - - -def is_pem_format(key: bytes) -> bool: - return bool(_PEM_RE.search(key)) - - -# Based on -# https://github.com/pyca/cryptography/blob/bcb70852d577b3f490f015378c75cba74986297b -# /src/cryptography/hazmat/primitives/serialization/ssh.py#L40-L46 -_CERT_SUFFIX = b"-cert-v01@openssh.com" -_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") -_SSH_KEY_FORMATS = [ - b"ssh-ed25519", - b"ssh-rsa", - b"ssh-dss", - b"ecdsa-sha2-nistp256", - b"ecdsa-sha2-nistp384", - b"ecdsa-sha2-nistp521", -] - - -def is_ssh_key(key: bytes) -> bool: - if any(string_value in key for string_value in _SSH_KEY_FORMATS): - return True - ssh_pubkey_match = _SSH_PUBKEY_RC.match(key) - if ssh_pubkey_match: - key_type = ssh_pubkey_match.group(1) - if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]: - return True - return False diff --git a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/LICENSE b/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/LICENSE deleted file mode 100644 index 176437a5..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2022 Roman Right (Korolev) - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
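For illustration, the `at_hash` rule quoted in the deleted `jose` sources earlier in this diff can be checked by hand. A minimal stdlib-only sketch of the computation (the token value below is made up):

```python
import base64
import hashlib

# Per OpenID Connect Core: hash the ASCII access_token with the hash that
# matches the token's alg (SHA-256 for RS256/HS256), keep the left-most
# half of the digest, then base64url-encode it without padding.
access_token = "dNZX1hEZ9wBCzNL40Upu646bdzQA"  # illustrative value
digest = hashlib.sha256(access_token.encode("ascii")).digest()
left_half = digest[: len(digest) // 2]  # 16 bytes == left-most 128 bits
at_hash = base64.urlsafe_b64encode(left_half).rstrip(b"=").decode("ascii")
print(at_hash)  # the value a conforming ID Token would carry in "at_hash"
```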
diff --git a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/METADATA b/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/METADATA deleted file mode 100644 index e1e1d192..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/METADATA +++ /dev/null @@ -1,82 +0,0 @@ -Metadata-Version: 2.1 -Name: lazy-model -Version: 0.3.0 -Summary: -License: Apache-2.0 -Author: Roman Right -Author-email: roman-right@protonmail.com -Requires-Python: >=3.8,<4.0 -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Requires-Dist: pydantic (>=1.9.0) -Description-Content-Type: text/markdown - -# Lazy parsing for Pydantic models - -This library provides a lazy interface for parsing objects from dictionaries. During the parsing, it saves the raw data inside the object and parses each field on demand. - -## Install - -poetry -```shell -poetry add lazy-model -``` - -pip -```shell -pip install lazy-model -``` - -## Usage - -```python -from lazy_model import LazyModel -from pydantic import validator - - -class Sample(LazyModel): - i: int - s: str - - @validator("s") - def s_upper(cls, v): - return v.upper() - - -obj = Sample.lazy_parse({"i": "10", "s": "test"}) - -# at this point the data is stored in a raw format inside the object - -print(obj.__dict__) - -# >>> {'i': NAO, 's': NAO} - -# NAO - Not An Object. It shows that the field was not parsed yet. - -print(obj.s) - -# >>> TEST - -# Custom validator works during lazy parsing - -print(obj.__dict__) - -# >>> {'i': NAO, 's': 'TEST'} - -# The `s` field was already parsed by this step - -print(obj.i, type(obj.i)) - -# >>> 10 - -# It converted `10` from string to int based on the annotations - -print(obj.__dict__) - -# >>> {'i': 10, 's': 'TEST'} - -# Everything was parsed -``` diff --git a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/RECORD b/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/RECORD deleted file mode 100644 index 4b77399f..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/RECORD +++ /dev/null @@ -1,18 +0,0 @@ -LICENSE,sha256=D3gfY_wsRWlda7bd4-JGuxS6ajbszZn_4zPLd_IR2BE,11351 -lazy_model-0.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -lazy_model-0.3.0.dist-info/LICENSE,sha256=D3gfY_wsRWlda7bd4-JGuxS6ajbszZn_4zPLd_IR2BE,11351 -lazy_model-0.3.0.dist-info/METADATA,sha256=T4HFhQJhUaqfrr0EemMW5GXQtXtzdq4URMGmOPJlQp0,1627 -lazy_model-0.3.0.dist-info/RECORD,, -lazy_model-0.3.0.dist-info/WHEEL,sha256=bbU3AyvhQ312rVm7zzRQjs6axI1UYWC3nmFA2E6FFSI,88 -lazy_model/__init__.py,sha256=kHZyflW_i7vQdJ4Lq8_p2fMQzXW6lYI7pwWUFdCkd0I,101 -lazy_model/__pycache__/__init__.cpython-312.pyc,, -lazy_model/__pycache__/main.cpython-312.pyc,, -lazy_model/__pycache__/nao.cpython-312.pyc,, -lazy_model/main.py,sha256=Jnere_uEFWxZw-2cgW-EG12YtNLzV-guw8kVZw8V7jA,351 -lazy_model/nao.py,sha256=V-RuBlFyqydCvPFMGyb5yxDSzxK036IR_P2TR4a5zFU,86 -lazy_model/parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -lazy_model/parser/__pycache__/__init__.cpython-312.pyc,, -lazy_model/parser/__pycache__/new.cpython-312.pyc,, -lazy_model/parser/__pycache__/old.cpython-312.pyc,, -lazy_model/parser/new.py,sha256=c667YfZtjVZF5LW5Ouzn5awRILD4F-0CDAKyKBlESTo,4288 -lazy_model/parser/old.py,sha256=zu7nkYcfp0b-rE1KM5iu7UoMpEGD3ZlQlArhHv96cco,4399 diff --git 
a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/WHEEL b/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/WHEEL deleted file mode 100644 index 2892f30b..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model-0.3.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: poetry-core 1.1.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/venv/lib/python3.12/site-packages/lazy_model/__init__.py b/venv/lib/python3.12/site-packages/lazy_model/__init__.py deleted file mode 100644 index 7f650b1e..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from lazy_model.main import LazyModel -from lazy_model.nao import NAO - -__all__ = ["LazyModel", "NAO"] diff --git a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 6b3f2641..00000000 Binary files a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/main.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/__pycache__/main.cpython-312.pyc deleted file mode 100644 index 910542a2..00000000 Binary files a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/main.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/nao.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/__pycache__/nao.cpython-312.pyc deleted file mode 100644 index 89c53ca0..00000000 Binary files a/venv/lib/python3.12/site-packages/lazy_model/__pycache__/nao.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/main.py b/venv/lib/python3.12/site-packages/lazy_model/main.py deleted file mode 100644 index 86fac5c6..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model/main.py +++ /dev/null @@ -1,12 +0,0 @@ -import pydantic - -major_version = int(pydantic.version.VERSION.split(".")[0]) - -if major_version == 1: - from lazy_model.parser.old import LazyModel # noqa: F401 -elif major_version == 2: - from lazy_model.parser.new import LazyModel # noqa: F401 -else: - raise NotImplementedError( - f"pydantic version {major_version} not supported" - ) diff --git a/venv/lib/python3.12/site-packages/lazy_model/nao.py b/venv/lib/python3.12/site-packages/lazy_model/nao.py deleted file mode 100644 index 5d2ce29e..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model/nao.py +++ /dev/null @@ -1,6 +0,0 @@ -class NotAnObject: - def __repr__(self): - return "NAO" - - -NAO = NotAnObject() diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/__init__.py b/venv/lib/python3.12/site-packages/lazy_model/parser/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index 66690a2d..00000000 Binary files a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/__init__.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/new.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/new.cpython-312.pyc deleted file mode 100644 index 9cf0329b..00000000 Binary files 
a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/new.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/old.cpython-312.pyc b/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/old.cpython-312.pyc deleted file mode 100644 index 6ee766e9..00000000 Binary files a/venv/lib/python3.12/site-packages/lazy_model/parser/__pycache__/old.cpython-312.pyc and /dev/null differ diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/new.py b/venv/lib/python3.12/site-packages/lazy_model/parser/new.py deleted file mode 100644 index df7c5340..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model/parser/new.py +++ /dev/null @@ -1,121 +0,0 @@ -from typing import Any, Optional, Set, Dict - -import pydantic_core -from pydantic import BaseModel, PrivateAttr, ValidationError, TypeAdapter -from pydantic._internal import _fields - -from lazy_model.nao import NAO - -ROOT_KEY = "__root__" -_object_setattr = object.__setattr__ - - -class LazyModel(BaseModel): - _store: Dict[str, Any] = PrivateAttr(default_factory=dict) - _lazily_parsed: bool = PrivateAttr(default=False) - - @classmethod - def lazy_parse( - cls, - data: Dict[str, Any], - fields: Optional[Set[str]] = None, - ): - fields_values = {} - field_alias_map = {} - if fields is None: - fields = set() - for name, field in cls.model_fields.items(): - fields_values[name] = NAO - alias = field.alias or name - if alias in fields: - field_alias_map[alias] = name - field_set = set(fields_values.keys()) - m = cls.model_construct(field_set, **fields_values) - m._store = data - - for alias in fields: - m._set_attr(field_alias_map[alias], data[alias]) - return m - - def _parse_value(self, name, value): - field_type = self.__class__.model_fields.get(name).annotation - try: - value = TypeAdapter(field_type).validate_python(value) - except ValidationError as e: - if ( - value is None - and self.__class__.model_fields.get(name).required is False - ): - value = None - else: - raise e - return value - - def parse_store(self): - for name in self.__class__.model_fields: - self.__getattribute__(name) - - def _set_attr(self, name: str, value: Any) -> None: - """ - Stolen from Pydantic. - :param name: - :param value: - :return: - - TODO rework - """ - if name in self.__class_vars__: - raise AttributeError( - f"{name!r} is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. " # noqa: E501 - f"If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`." 
# noqa: E501 - ) - elif not _fields.is_valid_field_name(name): - if ( - self.__pydantic_private__ is None - or name not in self.__private_attributes__ - ): - _object_setattr(self, name, value) - else: - attribute = self.__private_attributes__[name] - if hasattr(attribute, "__set__"): - attribute.__set__(self, value) # type: ignore - else: - self.__pydantic_private__[name] = value - return - elif self.model_config.get("frozen", None): - error: pydantic_core.InitErrorDetails = { - "type": "frozen_instance", - "loc": (name,), - "input": value, - } - raise pydantic_core.ValidationError.from_exception_data( - self.__class__.__name__, [error] - ) - - attr = getattr(self.__class__, name, None) - if isinstance(attr, property): - attr.__set__(self, value) - self.__pydantic_validator__.validate_assignment(self, name, value) - - def __getattribute__(self, item): - # If __class__ is accessed, return it directly to avoid recursion - if item == "__class__": - return super().__getattribute__(item) - - # If called on the class itself, delegate to super's __getattribute__ - if type(self) is type: # Check if self is a class - return super(type, self).__getattribute__(item) - - # For instances, use the object's __getattribute__ to prevent recursion - res = object.__getattribute__(self, item) - if res is NAO: - field_info = self.__class__.model_fields.get(item) - alias = field_info.alias or item - value = self._store.get(alias, NAO) - if value is NAO: - value = field_info.get_default() - else: - value = self._parse_value(item, value) - self._set_attr(item, value) - res = super(LazyModel, self).__getattribute__(item) - return res diff --git a/venv/lib/python3.12/site-packages/lazy_model/parser/old.py b/venv/lib/python3.12/site-packages/lazy_model/parser/old.py deleted file mode 100644 index ff580faf..00000000 --- a/venv/lib/python3.12/site-packages/lazy_model/parser/old.py +++ /dev/null @@ -1,128 +0,0 @@ -from typing import Any, Optional, Set, Dict - -from pydantic import BaseModel, PrivateAttr, parse_obj_as, ValidationError - -from pydantic.error_wrappers import ErrorWrapper - -from lazy_model.nao import NAO - -ROOT_KEY = "__root__" -object_setattr = object.__setattr__ - - -class LazyModel(BaseModel): - _store: Dict[str, Any] = PrivateAttr(default_factory=dict) - _lazily_parsed: bool = PrivateAttr(default=False) - - @classmethod - def lazy_parse( - cls, - data: Dict[str, Any], - fields: Optional[Set[str]] = None, - **new_kwargs, - ): - m = cls.__new__(cls, **new_kwargs) # type: ignore - if fields is None: - fields = set() - fields_values: Dict[str, Any] = {} - field_alias_map: Dict[str, str] = {} - for name, field in cls.__fields__.items(): - fields_values[name] = NAO - if field.alias in fields: - field_alias_map[field.alias] = name - object_setattr(m, "__dict__", fields_values) - _fields_set = set(fields_values.keys()) - object_setattr(m, "__fields_set__", _fields_set) - m._init_private_attributes() - - m._store = data - m._lazily_parsed = True - - for alias in fields: - m._set_attr(field_alias_map[alias], data[alias]) - return m - - def _set_attr(self, name, value): - """ - Stolen from Pydantic. 
- - # TODO rework - """ - field_type = self.__fields__.get(name).annotation - try: - value = parse_obj_as(field_type, value) - except ValidationError as e: - if value is None and self.__fields__.get(name).required is False: - value = None - else: - raise e - - new_values = {**self.__dict__, name: value} - - for validator in self.__pre_root_validators__: - try: - new_values = validator(self.__class__, new_values) - except (ValueError, TypeError, AssertionError) as exc: - raise ValidationError( - [ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__ - ) - - known_field = self.__fields__.get(name, None) - if known_field: - if not known_field.field_info.allow_mutation: - raise TypeError( - f'"{known_field.name}" has allow_mutation set ' - f"to False and cannot be assigned" - ) - dict_without_original_value = { - k: v for k, v in self.__dict__.items() if k != name - } - value, error_ = known_field.validate( - value, - dict_without_original_value, - loc=name, - cls=self.__class__, - ) - if error_: - raise ValidationError([error_], self.__class__) - else: - new_values[name] = value - - errors = [] - for skip_on_failure, validator in self.__post_root_validators__: - if skip_on_failure and errors: - continue - try: - new_values = validator(self.__class__, new_values) - except (ValueError, TypeError, AssertionError) as exc: - errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) - if errors: - raise ValidationError(errors, self.__class__) - - object_setattr(self, "__dict__", new_values) - - self.__fields_set__.add(name) - - def parse_store(self): - for name in self.__fields__: - self.__getattribute__(name) - - def __getattribute__(self, item): - # If __class__ is accessed, return it directly to avoid recursion - if item == "__class__": - return super().__getattribute__(item) - - # If called on the class itself, delegate to super's __getattribute__ - if type(self) is type: # Check if self is a class - return super(type, self).__getattribute__(item) - - # For instances, use the object's __getattribute__ to prevent recursion - res = object.__getattribute__(self, item) - if res is NAO: - field_info = self.__fields__.get(item) - value = self._store.get(field_info.alias, NAO) - if value is NAO: - value = field_info.get_default() - self._set_attr(item, value) - res = super(LazyModel, self).__getattribute__(item) - return res diff --git a/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/INSTALLER b/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e3..00000000 --- a/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/METADATA b/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/METADATA deleted file mode 100644 index 09505abe..00000000 --- a/venv/lib/python3.12/site-packages/loguru-0.7.3.dist-info/METADATA +++ /dev/null @@ -1,462 +0,0 @@ -Metadata-Version: 2.3 -Name: loguru -Version: 0.7.3 -Summary: Python logging made (stupidly) simple -Keywords: loguru,logging,logger,log -Author-email: Delgan -Requires-Python: >=3.5,<4.0 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Topic :: System :: Logging -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 
-Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: Implementation :: CPython -Requires-Dist: colorama>=0.3.4 ; sys_platform=='win32' -Requires-Dist: aiocontextvars>=0.2.0 ; python_version<'3.7' -Requires-Dist: win32-setctime>=1.0.0 ; sys_platform=='win32' -Requires-Dist: pre-commit==4.0.1 ; extra == "dev" and ( python_version>='3.9') -Requires-Dist: tox==3.27.1 ; extra == "dev" and ( python_version<'3.8') -Requires-Dist: tox==4.23.2 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: pytest==6.1.2 ; extra == "dev" and ( python_version<'3.8') -Requires-Dist: pytest==8.3.2 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: pytest-cov==2.12.1 ; extra == "dev" and ( python_version<'3.8') -Requires-Dist: pytest-cov==5.0.0 ; extra == "dev" and ( python_version>='3.8' and python_version<'3.9') -Requires-Dist: pytest-cov==6.0.0 ; extra == "dev" and ( python_version>='3.9') -Requires-Dist: pytest-mypy-plugins==1.9.3 ; extra == "dev" and ( python_version>='3.6' and python_version<'3.8') -Requires-Dist: pytest-mypy-plugins==3.1.0 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: colorama==0.4.5 ; extra == "dev" and ( python_version<'3.8') -Requires-Dist: colorama==0.4.6 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: freezegun==1.1.0 ; extra == "dev" and ( python_version<'3.8') -Requires-Dist: freezegun==1.5.0 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: exceptiongroup==1.1.3 ; extra == "dev" and ( python_version>='3.7' and python_version<'3.11') -Requires-Dist: mypy==v0.910 ; extra == "dev" and ( python_version<'3.6') -Requires-Dist: mypy==v0.971 ; extra == "dev" and ( python_version>='3.6' and python_version<'3.7') -Requires-Dist: mypy==v1.4.1 ; extra == "dev" and ( python_version>='3.7' and python_version<'3.8') -Requires-Dist: mypy==v1.13.0 ; extra == "dev" and ( python_version>='3.8') -Requires-Dist: Sphinx==8.1.3 ; extra == "dev" and ( python_version>='3.11') -Requires-Dist: sphinx-rtd-theme==3.0.2 ; extra == "dev" and ( python_version>='3.11') -Requires-Dist: myst-parser==4.0.0 ; extra == "dev" and ( python_version>='3.11') -Requires-Dist: build==1.2.2 ; extra == "dev" and ( python_version>='3.11') -Requires-Dist: twine==6.0.1 ; extra == "dev" and ( python_version>='3.11') -Project-URL: Changelog, https://github.com/Delgan/loguru/blob/master/CHANGELOG.rst -Project-URL: Documentation, https://loguru.readthedocs.io/en/stable/index.html -Project-URL: Homepage, https://github.com/Delgan/loguru -Provides-Extra: dev - -
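(For illustration, not part of the recorded metadata: the `Provides-Extra: dev` field and the `extra == "dev"` markers above declare an optional dependency set, installable with a command of the form `pip install "loguru[dev]"`.)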

-[Loguru logo; badges: PyPI version, Python versions, Documentation, Build status, Coverage, Code quality, License]

- -______________________________________________________________________ - -**Loguru** is a library which aims to bring enjoyable logging to Python. - -Did you ever feel lazy about configuring a logger and used `print()` instead?... I did, yet logging is fundamental to every application and eases the process of debugging. Using **Loguru** you have no excuse not to use logging from the start; it is as simple as `from loguru import logger`. - -Also, this library is intended to make Python logging less painful by adding a bunch of useful functionalities that solve caveats of the standard loggers. Using logs in your application should be an automatism; **Loguru** tries to make it both pleasant and powerful. - - - -## Installation - -``` -pip install loguru -``` - -## Features - -- [Ready to use out of the box without boilerplate](#ready-to-use-out-of-the-box-without-boilerplate) -- [No Handler, no Formatter, no Filter: one function to rule them all](#no-handler-no-formatter-no-filter-one-function-to-rule-them-all) -- [Easier file logging with rotation / retention / compression](#easier-file-logging-with-rotation--retention--compression) -- [Modern string formatting using braces style](#modern-string-formatting-using-braces-style) -- [Exceptions catching within threads or main](#exceptions-catching-within-threads-or-main) -- [Pretty logging with colors](#pretty-logging-with-colors) -- [Asynchronous, Thread-safe, Multiprocess-safe](#asynchronous-thread-safe-multiprocess-safe) -- [Fully descriptive exceptions](#fully-descriptive-exceptions) -- [Structured logging as needed](#structured-logging-as-needed) -- [Lazy evaluation of expensive functions](#lazy-evaluation-of-expensive-functions) -- [Customizable levels](#customizable-levels) -- [Better datetime handling](#better-datetime-handling) -- [Suitable for scripts and libraries](#suitable-for-scripts-and-libraries) -- [Entirely compatible with standard logging](#entirely-compatible-with-standard-logging) -- [Personalizable defaults through environment variables](#personalizable-defaults-through-environment-variables) -- [Convenient parser](#convenient-parser) -- [Exhaustive notifier](#exhaustive-notifier) -- [10x faster than built-in logging](#10x-faster-than-built-in-logging) - -## Take the tour - -### Ready to use out of the box without boilerplate - -The main concept of Loguru is that **there is one and only one** [`logger`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger). - -For convenience, it is pre-configured and outputs to `stderr` to begin with (but that's entirely configurable). - -```python -from loguru import logger - -logger.debug("That's it, beautiful and simple logging!") -``` - -The [`logger`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger) is just an interface which dispatches log messages to configured handlers. Simple, right? - -### No Handler, no Formatter, no Filter: one function to rule them all - -How to add a handler? How to set up logs formatting? How to filter messages? How to set level? - -One answer: the [`add()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.add) function.
- -```python -logger.add(sys.stderr, format="{time} {level} {message}", filter="my_module", level="INFO") -``` - -This function should be used to register [sinks](https://loguru.readthedocs.io/en/stable/api/logger.html#sink) which are responsible for managing [log messages](https://loguru.readthedocs.io/en/stable/api/logger.html#message) contextualized with a [record dict](https://loguru.readthedocs.io/en/stable/api/logger.html#record). A sink can take many forms: a simple function, a string path, a file-like object, a coroutine function, or a built-in Handler. - -Note that you may also remove a previously added handler by using the identifier returned while adding it. This is particularly useful if you want to supersede the default `stderr` handler: just call [`logger.remove()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.remove) to make a fresh start. - -### Easier file logging with rotation / retention / compression - -If you want to send logged messages to a file, you just have to use a string path as the sink. It can be automatically timed too for convenience: - -```python -logger.add("file_{time}.log") -``` - -It is also [easily configurable](https://loguru.readthedocs.io/en/stable/api/logger.html#file) if you need a rotating logger, if you want to remove older logs, or if you wish to compress your files at closure. - -```python -logger.add("file_1.log", rotation="500 MB") # Automatically rotate files that get too big -logger.add("file_2.log", rotation="12:00") # New file is created each day at noon -logger.add("file_3.log", rotation="1 week") # Once the file is too old, it's rotated - -logger.add("file_X.log", retention="10 days") # Cleanup after some time - -logger.add("file_Y.log", compression="zip") # Save some loved space -``` - -### Modern string formatting using braces style - -Loguru favors the much more elegant and powerful `{}` formatting over `%`; logging functions are actually equivalent to `str.format()`. - -```python -logger.info("If you're using Python {}, prefer {feature} of course!", 3.6, feature="f-strings") -``` - -### Exceptions catching within threads or main - -Have you ever seen your program crashing unexpectedly without seeing anything in the log file? Did you ever notice that exceptions occurring in threads were not logged? This can be solved using the [`catch()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.catch) decorator / context manager which ensures that any error is correctly propagated to the [`logger`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger). - -```python -@logger.catch -def my_function(x, y, z): - # An error? It's caught anyway! - return 1 / (x + y + z) -``` - -### Pretty logging with colors - -Loguru automatically adds colors to your logs if your terminal is compatible. You can define your favorite style by using [markup tags](https://loguru.readthedocs.io/en/stable/api/logger.html#color) in the sink format. - -```python -logger.add(sys.stdout, colorize=True, format="<green>{time}</green> <level>{message}</level>") -``` - -### Asynchronous, Thread-safe, Multiprocess-safe - -All sinks added to the [`logger`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger) are thread-safe by default. They are not multiprocess-safe, but you can `enqueue` the messages to ensure log integrity. This same argument can also be used if you want async logging.
- -```python -logger.add("somefile.log", enqueue=True) -``` - -Coroutine functions used as sinks are also supported and should be awaited with [`complete()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.complete). - -### Fully descriptive exceptions - -Logging exceptions that occur in your code is important to track bugs, but it's quite useless if you don't know why it failed. Loguru helps you identify problems by allowing the entire stack trace to be displayed, including values of variables (thanks [`better_exceptions`](https://github.com/Qix-/better-exceptions) for this!). - -The code: - -```python -# Caution, "diagnose=True" is the default and may leak sensitive data in prod -logger.add("out.log", backtrace=True, diagnose=True) - -def func(a, b): - return a / b - -def nested(c): - try: - func(5, c) - except ZeroDivisionError: - logger.exception("What?!") - -nested(0) -``` - -Would result in: - -```none -2018-07-17 01:38:43.975 | ERROR | __main__:nested:10 - What?! -Traceback (most recent call last): - - File "test.py", line 12, in <module> - nested(0) - └ <function nested at 0x7f5c755322f0> - -> File "test.py", line 8, in nested - func(5, c) - │ └ 0 - └ <function func at 0x7f5c79fc2e18> - - File "test.py", line 4, in func - return a / b - │ └ 0 - └ 5 - -ZeroDivisionError: division by zero -``` - -Note that this feature won't work in the default Python REPL due to unavailable frame data. - -See also: [Security considerations when using Loguru](https://loguru.readthedocs.io/en/stable/resources/recipes.html#security-considerations-when-using-loguru). - -### Structured logging as needed - -Want your logs to be serialized for easier parsing or to pass them around? Using the `serialize` argument, each log message will be converted to a JSON string before being sent to the configured sink. - -```python -logger.add(custom_sink_function, serialize=True) -``` - -Using [`bind()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.bind) you can contextualize your logger messages by modifying the `extra` record attribute.
- -```python -logger.add("file.log", format="{extra[ip]} {extra[user]} {message}") -context_logger = logger.bind(ip="192.168.0.1", user="someone") -context_logger.info("Contextualize your logger easily") -context_logger.bind(user="someone_else").info("Inline binding of extra attribute") -context_logger.info("Use kwargs to add context during formatting: {user}", user="anybody") -``` - -It is possible to modify a context-local state temporarily with [`contextualize()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.contextualize): - -```python -with logger.contextualize(task=task_id): - do_something() - logger.info("End of task") -``` - -You can also have more fine-grained control over your logs by combining [`bind()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.bind) and `filter`: - -```python -logger.add("special.log", filter=lambda record: "special" in record["extra"]) -logger.debug("This message is not logged to the file") -logger.bind(special=True).info("This message, though, is logged to the file!") -``` - -Finally, the [`patch()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.patch) method allows dynamic values to be attached to the record dict of each new message: - -```python -logger.add(sys.stderr, format="{extra[utc]} {message}") -logger = logger.patch(lambda record: record["extra"].update(utc=datetime.utcnow())) -``` - -### Lazy evaluation of expensive functions - -Sometimes you would like to log verbose information without a performance penalty in production; you can use the [`opt()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.opt) method to achieve this. - -```python -logger.opt(lazy=True).debug("If sink level <= DEBUG: {x}", x=lambda: expensive_function(2**64)) - -# By the way, "opt()" serves many usages -logger.opt(exception=True).info("Error stacktrace added to the log message (tuple accepted too)") -logger.opt(colors=True).info("Per message <blue>colors</blue>") -logger.opt(record=True).info("Display values from the record (eg. {record[thread]})") -logger.opt(raw=True).info("Bypass sink formatting\n") -logger.opt(depth=1).info("Use parent stack context (useful within wrapped functions)") -logger.opt(capture=False).info("Keyword arguments not added to {dest} dict", dest="extra") -``` - -### Customizable levels - -Loguru comes with all standard [logging levels](https://loguru.readthedocs.io/en/stable/api/logger.html#levels) to which [`trace()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.trace) and [`success()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.success) are added. Do you need more? Then, just create it by using the [`level()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.level) function. - -```python -new_level = logger.level("SNAKY", no=38, color="<yellow>", icon="🐍") - -logger.log("SNAKY", "Here we go!") -``` - -### Better datetime handling - -The standard logging is bloated with arguments like `datefmt` or `msecs`, `%(asctime)s` and `%(created)s`, naive datetimes without timezone information, not intuitive formatting, etc.
Loguru [fixes it](https://loguru.readthedocs.io/en/stable/api/logger.html#time): - -```python -logger.add("file.log", format="{time:YYYY-MM-DD at HH:mm:ss} | {level} | {message}") -``` - -### Suitable for scripts and libraries - -Using the logger in your scripts is easy, and you can [`configure()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.configure) it at startup. To use Loguru from inside a library, remember to never call [`add()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.add) but use [`disable()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.disable) instead so logging functions become no-ops. If a developer wishes to see your library's logs, they can [`enable()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.enable) it again. - -```python -# For scripts -config = { - "handlers": [ - {"sink": sys.stdout, "format": "{time} - {message}"}, - {"sink": "file.log", "serialize": True}, - ], - "extra": {"user": "someone"} -} -logger.configure(**config) - -# For libraries, should be your library's `__name__` -logger.disable("my_library") -logger.info("No matter added sinks, this message is not displayed") - -# In your application, enable the logger in the library -logger.enable("my_library") -logger.info("This message however is propagated to the sinks") -``` - -For additional convenience, you can also use the [`loguru-config`](https://github.com/erezinman/loguru-config) library to set up the `logger` directly from a configuration file. - -### Entirely compatible with standard logging - -Wish to use a built-in logging `Handler` as a Loguru sink? - -```python -handler = logging.handlers.SysLogHandler(address=('localhost', 514)) -logger.add(handler) -``` - -Need to propagate Loguru messages to standard `logging`? - -```python -class PropagateHandler(logging.Handler): - def emit(self, record: logging.LogRecord) -> None: - logging.getLogger(record.name).handle(record) - -logger.add(PropagateHandler(), format="{message}") -``` - -Want to intercept standard `logging` messages toward your Loguru sinks? - -```python -class InterceptHandler(logging.Handler): - def emit(self, record: logging.LogRecord) -> None: - # Get corresponding Loguru level if it exists. - level: str | int - try: - level = logger.level(record.levelname).name - except ValueError: - level = record.levelno - - # Find caller from where originated the logged message. - frame, depth = inspect.currentframe(), 0 - while frame and (depth == 0 or frame.f_code.co_filename == logging.__file__): - frame = frame.f_back - depth += 1 - - logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage()) - -logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) -``` - -### Personalizable defaults through environment variables - -Don't like the default logger formatting? Would you prefer another `DEBUG` color? [No problem](https://loguru.readthedocs.io/en/stable/api/logger.html#env): - -```bash -# Linux / OSX -export LOGURU_FORMAT="{time} | {message}" - -# Windows -setx LOGURU_DEBUG_COLOR "<green>" -``` - -### Convenient parser - -It is often useful to extract specific information from generated logs; this is why Loguru provides a [`parse()`](https://loguru.readthedocs.io/en/stable/api/logger.html#loguru._logger.Logger.parse) method which helps to deal with logs and regexes. - -```python -pattern = r"(?P